source.dussan.org Git - archiva.git/commitdiff
(NOT COMPLETE YET) Committing what I have for stability and memory utilization so...
authorJoakim Erdfelt <joakime@apache.org>
Wed, 14 Feb 2007 14:44:59 +0000 (14:44 +0000)
committerJoakim Erdfelt <joakime@apache.org>
Wed, 14 Feb 2007 14:44:59 +0000 (14:44 +0000)
* Overhauled archiva-discoverer to use a walker / publish / consumer type interface instead of in-memory lists of files.  This has resulted in a reduction of memory usage over large repositories.  Expect an average memory reduction of (22MB * Managed Repository Count).
* Overhauled archiva-reports-standard to use jpox as opposed to xml store to reduce memory usage.  Expect an average memory reduction of (45MB * Managed Repository Count).

git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/branches@507550 13f79535-47bb-0310-9956-ffa450edef68

138 files changed:
archiva-MRM-239/archiva-cli/src/main/java/org/apache/maven/archiva/cli/ArchivaCli.java
archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java
archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java
archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java [new file with mode: 0644]
archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java [new file with mode: 0644]
archiva-MRM-239/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifact.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifactTypes.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedEjbArtifact.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedJavaArtifact.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java [deleted file]
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java [deleted file]
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskScheduler.java
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java [new file with mode: 0644]
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java [deleted file]
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java [new file with mode: 0644]
archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java [deleted file]
archiva-MRM-239/archiva-core/src/test/java/org/apache/maven/archiva/LegacyRepositoryConverterTest.java
archiva-MRM-239/archiva-core/src/test/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositoriesTest.java
archiva-MRM-239/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/ArtifactDiscoverer.java
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/Discoverer.java
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererConsumerFactory.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/PathUtil.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/AbstractLayoutArtifactBuilder.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/BuilderException.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/DefaultLayoutArtifactBuilder.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/LayoutArtifactBuilder.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/LegacyLayoutArtifactBuilder.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/AbstractDiscovererConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java [deleted file]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/AbstractLayoutArtifactBuilderTestCase.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/DefaultLayoutArtifactBuilderTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/LegacyLayoutArtifactBuilderTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/AbstractConsumerTestCase.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumerTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumerTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumerTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockArtifactConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockModelConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockRepositoryMetadataConsumer.java [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumerTest.xml [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumerTest.xml [new file with mode: 0644]
archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumerTest.xml [new file with mode: 0644]
archiva-MRM-239/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryArtifactIndex.java
archiva-MRM-239/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryArtifactIndex.java
archiva-MRM-239/archiva-reports-standard/pom.xml
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java [deleted file]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java [deleted file]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java [deleted file]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java [deleted file]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java [deleted file]
archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java [deleted file]
archiva-MRM-239/archiva-reports-standard/src/main/mdo/reporting.mdo
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/AbstractRepositoryReportsTestCase.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabaseTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabaseTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/ReportingDatabaseTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessorTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessorTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessorTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessorTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumArtifactReporterTest.java
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumMetadataReporterTest.java [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/DefaultArtifactReporterTest.java
archiva-MRM-239/archiva-reports-standard/src/test/resources/META-INF/plexus/components.xml [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/test/resources/log4j.properties [new file with mode: 0644]
archiva-MRM-239/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.xml
archiva-MRM-239/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.xml
archiva-MRM-239/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.xml
archiva-MRM-239/archiva-webapp/pom.xml
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/AuditLog.java
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/ProxiedDavServer.java
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/RepositoryServlet.java
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifact.java
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifactTag.java
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/ExpressionTool.java
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLink.java
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLinkTag.java
archiva-MRM-239/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/PlexusTagUtil.java
archiva-MRM-239/archiva-webapp/src/main/resources/META-INF/plexus/application.xml
archiva-MRM-239/pom.xml

index f9ba5cc9e6ba2e3051a9dd8041e24b05db50f1f8..5fff596c6f4059e19fab3fc9596bca0d91b30e31 100644 (file)
@@ -23,8 +23,8 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.conversion.LegacyRepositoryConverter;
 import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
 import org.apache.maven.archiva.discoverer.DiscovererException;
 import org.codehaus.plexus.PlexusContainer;
 import org.codehaus.plexus.tools.cli.AbstractCli;
@@ -115,7 +115,7 @@ public class ArchivaCli
             try
             {
                 legacyRepositoryConverter.convertLegacyRepository( oldRepositoryPath, newRepositoryPath,
-                                                                   blacklistedPatterns, true );
+                                                                   true );
             }
             catch ( RepositoryConversionException e )
             {
index c723a11aba0f700dcef1c8762a2676e712366e8a..a2263c24598fc05765c1ad60d116d083d0b7f003 100644 (file)
@@ -38,7 +38,6 @@ import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
 import org.apache.maven.model.DistributionManagement;
 import org.apache.maven.model.Model;
 import org.apache.maven.model.Relocation;
-import org.apache.maven.model.converter.ArtifactPomRewriter;
 import org.apache.maven.model.converter.ModelConverter;
 import org.apache.maven.model.converter.PomTranslationException;
 import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
@@ -84,17 +83,17 @@ public class DefaultRepositoryConverter
     /**
      * @plexus.requirement
      */
-    private ArtifactPomRewriter rewriter;
+    private ModelConverter translator;
 
     /**
      * @plexus.requirement
      */
-    private ModelConverter translator;
-
+    private ArtifactHandlerManager artifactHandlerManager;
+    
     /**
      * @plexus.requirement
      */
-    private ArtifactHandlerManager artifactHandlerManager;
+    private ReportingDatabase reportingDatabase;
 
     /**
      * @plexus.configuration default-value="false"
@@ -111,7 +110,7 @@ public class DefaultRepositoryConverter
      */
     private I18N i18n;
 
-    public void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+    public void convert( Artifact artifact, ArtifactRepository targetRepository )
         throws RepositoryConversionException
     {
         if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
@@ -119,13 +118,13 @@ public class DefaultRepositoryConverter
             throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) );
         }
 
-        if ( validateMetadata( artifact, reporter ) )
+        if ( validateMetadata( artifact ) )
         {
             FileTransaction transaction = new FileTransaction();
 
-            if ( copyPom( artifact, targetRepository, reporter, transaction ) )
+            if ( copyPom( artifact, targetRepository, transaction ) )
             {
-                if ( copyArtifact( artifact, targetRepository, reporter, transaction ) )
+                if ( copyArtifact( artifact, targetRepository, transaction ) )
                 {
                     Metadata metadata = createBaseMetadata( artifact );
                     Versioning versioning = new Versioning();
@@ -244,7 +243,7 @@ public class DefaultRepositoryConverter
         return metadata;
     }
 
-    private boolean validateMetadata( Artifact artifact, ReportingDatabase reporter )
+    private boolean validateMetadata( Artifact artifact )
         throws RepositoryConversionException
     {
         ArtifactRepository repository = artifact.getRepository();
@@ -257,7 +256,7 @@ public class DefaultRepositoryConverter
         if ( file.exists() )
         {
             Metadata metadata = readMetadata( file );
-            result = validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+            result = validateMetadata( metadata, repositoryMetadata, artifact );
         }
 
         repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
@@ -265,14 +264,13 @@ public class DefaultRepositoryConverter
         if ( file.exists() )
         {
             Metadata metadata = readMetadata( file );
-            result = result && validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+            result = result && validateMetadata( metadata, repositoryMetadata, artifact );
         }
 
         return result;
     }
 
-    private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
-                                      ReportingDatabase reporter )
+    private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact )
     {
         String groupIdKey;
         String artifactIdKey = null;
@@ -302,14 +300,14 @@ public class DefaultRepositoryConverter
 
         if ( metadata.getGroupId() == null || !metadata.getGroupId().equals( artifact.getGroupId() ) )
         {
-            addFailure( reporter, artifact, groupIdKey );
+            addFailure( artifact, groupIdKey );
             result = false;
         }
         if ( !repositoryMetadata.storedInGroupDirectory() )
         {
             if ( metadata.getGroupId() == null || !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
             {
-                addFailure( reporter, artifact, artifactIdKey );
+                addFailure( artifact, artifactIdKey );
                 result = false;
             }
             if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
@@ -332,7 +330,7 @@ public class DefaultRepositoryConverter
 
                 if ( !foundVersion )
                 {
-                    addFailure( reporter, artifact, versionsKey );
+                    addFailure( artifact, versionsKey );
                     result = false;
                 }
             }
@@ -341,7 +339,7 @@ public class DefaultRepositoryConverter
                 // snapshot metadata
                 if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
                 {
-                    addFailure( reporter, artifact, versionKey );
+                    addFailure( artifact, versionKey );
                     result = false;
                 }
 
@@ -364,7 +362,7 @@ public class DefaultRepositoryConverter
 
                         if ( !correct )
                         {
-                            addFailure( reporter, artifact, snapshotKey );
+                            addFailure( artifact, snapshotKey );
                             result = false;
                         }
                     }
@@ -374,26 +372,25 @@ public class DefaultRepositoryConverter
         return result;
     }
 
-    private void addFailure( ReportingDatabase reporter, Artifact artifact, String key )
+    private void addFailure( Artifact artifact, String key )
     {
-        addFailureWithReason( reporter, artifact, getI18NString( key ) );
+        addFailureWithReason( artifact, getI18NString( key ) );
 
     }
 
-    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String message )
+    private void addWarning( Artifact artifact, String message )
     {
         // TODO: should we be able to identify/fix these?
-        reporter.addWarning( artifact, null, null, message );
+        reportingDatabase.getArtifactDatabase().addWarning( artifact, null, null, message );
     }
 
-    private static void addFailureWithReason( ReportingDatabase reporter, Artifact artifact, String reason )
+    private void addFailureWithReason( Artifact artifact, String reason )
     {
         // TODO: should we be able to identify/fix these?
-        reporter.addFailure( artifact, null, null, reason );
+        reportingDatabase.getArtifactDatabase().addFailure( artifact, null, null, reason );
     }
 
-    private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
-                             FileTransaction transaction )
+    private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
         throws RepositoryConversionException
     {
         Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(),
@@ -411,7 +408,7 @@ public class DefaultRepositoryConverter
             boolean checksumsValid = false;
             try
             {
-                if ( testChecksums( artifact, file, reporter ) )
+                if ( testChecksums( artifact, file ) )
                 {
                     checksumsValid = true;
                 }
@@ -479,12 +476,12 @@ public class DefaultRepositoryConverter
                     for ( Iterator i = warnings.iterator(); i.hasNext(); )
                     {
                         String message = (String) i.next();
-                        addWarning( reporter, artifact, message );
+                        addWarning( artifact, message );
                     }
                 }
                 catch ( XmlPullParserException e )
                 {
-                    addFailureWithReason( reporter, artifact,
+                    addFailureWithReason( artifact,
                                           getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
                     result = false;
                 }
@@ -494,7 +491,7 @@ public class DefaultRepositoryConverter
                 }
                 catch ( PomTranslationException e )
                 {
-                    addFailureWithReason( reporter, artifact,
+                    addFailureWithReason( artifact,
                                           getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
                     result = false;
                 }
@@ -506,7 +503,7 @@ public class DefaultRepositoryConverter
         }
         else
         {
-            addWarning( reporter, artifact, getI18NString( "warning.missing.pom" ) );
+            addWarning( artifact, getI18NString( "warning.missing.pom" ) );
         }
         return result;
     }
@@ -600,7 +597,7 @@ public class DefaultRepositoryConverter
         return i18n.getString( getClass().getName(), Locale.getDefault(), key );
     }
 
-    private boolean testChecksums( Artifact artifact, File file, ReportingDatabase reporter )
+    private boolean testChecksums( Artifact artifact, File file )
         throws IOException
     {
         boolean result = true;
@@ -609,7 +606,7 @@ public class DefaultRepositoryConverter
         {
             Digester digester = (Digester) it.next();
             result &= verifyChecksum( file, file.getName() + "." + getDigesterFileExtension( digester ), digester,
-                                      reporter, artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
+                                      artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
         }
         return result;
     }
@@ -623,8 +620,8 @@ public class DefaultRepositoryConverter
         return digester.getAlgorithm().toLowerCase().replaceAll( "-", "" );
     }
 
-    private boolean verifyChecksum( File file, String fileName, Digester digester, ReportingDatabase reporter,
-                                    Artifact artifact, String key )
+    private boolean verifyChecksum( File file, String fileName, Digester digester, Artifact artifact,
+                                    String key )
         throws IOException
     {
         boolean result = true;
@@ -639,15 +636,14 @@ public class DefaultRepositoryConverter
             }
             catch ( DigesterException e )
             {
-                addFailure( reporter, artifact, key );
+                addFailure( artifact, key );
                 result = false;
             }
         }
         return result;
     }
 
-    private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
-                                  FileTransaction transaction )
+    private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
         throws RepositoryConversionException
     {
         File sourceFile = artifact.getFile();
@@ -668,7 +664,7 @@ public class DefaultRepositoryConverter
                 matching = FileUtils.contentEquals( sourceFile, targetFile );
                 if ( !matching )
                 {
-                    addFailure( reporter, artifact, "failure.target.already.exists" );
+                    addFailure( artifact, "failure.target.already.exists" );
                     result = false;
                 }
             }
@@ -676,7 +672,7 @@ public class DefaultRepositoryConverter
             {
                 if ( force || !matching )
                 {
-                    if ( testChecksums( artifact, sourceFile, reporter ) )
+                    if ( testChecksums( artifact, sourceFile ) )
                     {
                         transaction.copyFile( sourceFile, targetFile, digesters );
                     }
@@ -694,7 +690,7 @@ public class DefaultRepositoryConverter
         return result;
     }
 
-    public void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+    public void convert( List artifacts, ArtifactRepository targetRepository )
         throws RepositoryConversionException
     {
         for ( Iterator i = artifacts.iterator(); i.hasNext(); )
@@ -703,7 +699,7 @@ public class DefaultRepositoryConverter
 
             try
             {
-                convert( artifact, targetRepository, reporter );
+                convert( artifact, targetRepository );
             }
             catch ( RepositoryConversionException e )
             {
@@ -716,7 +712,7 @@ public class DefaultRepositoryConverter
                 // the stack trace would be useful. I also have no idea what a processor is currently or
                 // how to get hold of it here.
 
-                reporter.addFailure( artifact, "", e.getLocalizedMessage(), e.getCause().getLocalizedMessage() );
+                reportingDatabase.getArtifactDatabase().addFailure( artifact, "", e.getLocalizedMessage(), e.getCause().getLocalizedMessage() );
             }
         }
     }
index d47d5b1f9ccd3bece021d775f4c2e0927a7e830e..8f1056926191f162c7d1e764ec0c2b2861d786e9 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.converter;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 
@@ -39,9 +38,8 @@ public interface RepositoryConverter
      *
      * @param artifact         the artifact to convert
      * @param targetRepository the target repository
-     * @param reporter         reporter to track the results of the conversion
      */
-    void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+    void convert( Artifact artifact, ArtifactRepository targetRepository )
         throws RepositoryConversionException;
 
     /**
@@ -49,8 +47,7 @@ public interface RepositoryConverter
      *
      * @param artifacts        the set of artifacts to convert
      * @param targetRepository the target repository
-     * @param reporter         reporter to track the results of the conversions
      */
-    void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+    void convert( List artifacts, ArtifactRepository targetRepository )
         throws RepositoryConversionException;
 }
diff --git a/archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java b/archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java
new file mode 100644 (file)
index 0000000..253a540
--- /dev/null
@@ -0,0 +1,100 @@
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.archiva.discoverer.DiscovererConsumerFactory;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Jason van Zyl
+ * @plexus.component
+ * @todo turn this into a general conversion component and hide all this crap here.
+ * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
+ */
+public class DefaultLegacyRepositoryConverter
+    implements LegacyRepositoryConverter
+{
+    /**
+     * @plexus.requirement role-hint="legacy"
+     */
+    private ArtifactRepositoryLayout legacyLayout;
+
+    /**
+     * @plexus.requirement role-hint="default"
+     */
+    private ArtifactRepositoryLayout defaultLayout;
+
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactRepositoryFactory artifactRepositoryFactory;
+
+    /**
+     * @plexus.requirement role-hint="default"
+     */
+    private Discoverer discoverer;
+
+    /**
+     * @plexus.requirement
+     */
+    private DiscovererConsumerFactory consumerFactory;
+
+    public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
+                                         boolean includeSnapshots )
+        throws RepositoryConversionException, DiscovererException
+    {
+        ArtifactRepository legacyRepository;
+
+        ArtifactRepository repository;
+
+        try
+        {
+            legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy", legacyRepositoryDirectory
+                .toURI().toURL().toString(), legacyLayout, null, null );
+
+            repository = artifactRepositoryFactory.createArtifactRepository( "default", repositoryDirectory.toURI()
+                .toURL().toString(), defaultLayout, null, null );
+        }
+        catch ( MalformedURLException e )
+        {
+            throw new RepositoryConversionException( "Error converting legacy repository.", e );
+        }
+
+        List consumers = new ArrayList();
+
+        LegacyConverterArtifactConsumer consumer = (LegacyConverterArtifactConsumer) consumerFactory
+            .createConsumer( "legacy-converter" );
+        consumer.setDestinationRepository( repository );
+
+        consumers.add( consumer );
+
+        discoverer.walkRepository( legacyRepository, consumers, includeSnapshots );
+    }
+}
diff --git a/archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java b/archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java
new file mode 100644 (file)
index 0000000..5bf27a9
--- /dev/null
@@ -0,0 +1,79 @@
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.converter.RepositoryConverter;
+import org.apache.maven.archiva.discoverer.consumers.GenericArtifactConsumer;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+
+/**
+ * LegacyConverterArtifactConsumer - convert artifacts as they are found
+ * into the destination repository. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ *     role-hint="legacy-converter"
+ *     instantiation-strategy="per-lookup"
+ */
+public class LegacyConverterArtifactConsumer
+    extends GenericArtifactConsumer
+{
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryConverter repositoryConverter;
+
+    private ArtifactRepository destinationRepository;
+
+    public void processArtifact( Artifact artifact, File file )
+    {
+        try
+        {
+            repositoryConverter.convert( artifact, destinationRepository );
+        }
+        catch ( RepositoryConversionException e )
+        {
+            getLogger().error(
+                               "Unable to convert artifact " + artifact + " to destination repository "
+                                   + destinationRepository, e );
+        }
+    }
+
+    public void processArtifactBuildFailure( File path, String message )
+    {
+        getLogger().error( "Artifact Build Failure on " + path + " : " + message );
+    }
+
+    public ArtifactRepository getDestinationRepository()
+    {
+        return destinationRepository;
+    }
+
+    public void setDestinationRepository( ArtifactRepository destinationRepository )
+    {
+        this.destinationRepository = destinationRepository;
+    }
+}
diff --git a/archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java b/archiva-MRM-239/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java
new file mode 100644 (file)
index 0000000..12033dc
--- /dev/null
@@ -0,0 +1,45 @@
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+
+import java.io.File;
+
+/**
+ * @author Jason van Zyl
+ */
+public interface LegacyRepositoryConverter
+{
+    String ROLE = LegacyRepositoryConverter.class.getName();
+
+    /**
+     * Convert a legacy repository to a modern repository. This means a Maven 1.x repository
+     * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs.
+     *
+     * @param legacyRepositoryDirectory
+     * @param repositoryDirectory
+     * @throws org.apache.maven.archiva.converter.RepositoryConversionException
+     *
+     */
+    void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, boolean includeSnapshots )
+        throws RepositoryConversionException, DiscovererException;
+}
index 15d5f187c554db10a3feff17d9699c6e00b6d5e5..a79a7b4e8ab05a0b225e981dd42dba452f1a39dd 100644 (file)
@@ -21,7 +21,6 @@ package org.apache.maven.archiva.converter;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -96,9 +95,6 @@ public class RepositoryConverterTest
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
 
         i18n = (I18N) lookup( I18N.ROLE );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
     }
 
     private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
@@ -167,7 +163,7 @@ public class RepositoryConverterTest
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         artifactFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         assertTrue( "Check artifact created", artifactFile.exists() );
@@ -209,7 +205,7 @@ public class RepositoryConverterTest
                                              targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
         versionMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
@@ -250,7 +246,7 @@ public class RepositoryConverterTest
                                              targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
         versionMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         //checkSuccess();  --> commented until MNG-2100 is fixed
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
@@ -285,7 +281,7 @@ public class RepositoryConverterTest
                                              targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
         versionMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
         assertEquals( "check number of warnings", 2, reportingDatabase.getNumWarnings() );
         assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
@@ -320,7 +316,7 @@ public class RepositoryConverterTest
                                               targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
         snapshotMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
@@ -363,7 +359,7 @@ public class RepositoryConverterTest
                                               targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
         snapshotMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
@@ -414,7 +410,7 @@ public class RepositoryConverterTest
             createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0", "maven-plugin" );
         artifact.setFile(
             new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         // There is a warning but I can't figure out how to look at it. Eyeballing the results it appears
         // the plugin is being coverted correctly.
         //checkSuccess();
@@ -450,7 +446,7 @@ public class RepositoryConverterTest
                                               targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
         snapshotMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
@@ -483,7 +479,7 @@ public class RepositoryConverterTest
         // test that a POM is not created when there was none at the source
 
         Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" );
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
         assertEquals( "check warnings", 1, reportingDatabase.getNumWarnings() );
         assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() );
@@ -510,7 +506,7 @@ public class RepositoryConverterTest
         File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         file.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkFailure();
         assertEquals( "check failure message", getI18nString( "failure.incorrect.md5" ), getFailure().getReason() );
 
@@ -531,7 +527,7 @@ public class RepositoryConverterTest
         File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         file.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkFailure();
         assertEquals( "check failure message", getI18nString( "failure.incorrect.sha1" ), getFailure().getReason() );
 
@@ -568,7 +564,7 @@ public class RepositoryConverterTest
         // Need to guarantee last modified is not equal
         Thread.sleep( SLEEP_MILLIS );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         compareFiles( sourceFile, targetFile );
@@ -604,7 +600,7 @@ public class RepositoryConverterTest
         // Need to guarantee last modified is not equal
         Thread.sleep( SLEEP_MILLIS );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkFailure();
         assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
                       getFailure().getReason() );
@@ -641,7 +637,7 @@ public class RepositoryConverterTest
         sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() );
         sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         compareFiles( sourceFile, targetFile );
@@ -671,7 +667,7 @@ public class RepositoryConverterTest
         File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         assertTrue( "Check source file exists", sourceFile.exists() );
@@ -713,7 +709,7 @@ public class RepositoryConverterTest
         // Need to guarantee last modified is not equal
         Thread.sleep( SLEEP_MILLIS );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkFailure();
         assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
                       getFailure().getReason() );
@@ -744,7 +740,7 @@ public class RepositoryConverterTest
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkFailure();
         String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$";
         assertTrue( "Check failure message", getFailure().getReason().matches( pattern ) );
@@ -763,7 +759,7 @@ public class RepositoryConverterTest
         artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) );
         artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) );
         artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) );
-        repositoryConverter.convert( artifacts, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifacts, targetRepository );
         assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
         assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
         assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
@@ -797,7 +793,7 @@ public class RepositoryConverterTest
         File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         file.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkFailure();
         assertEquals( "check failure message", getI18nString( "failure.incorrect.artifactMetadata.versions" ),
                       getFailure().getReason() );
@@ -821,7 +817,7 @@ public class RepositoryConverterTest
         File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         file.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkFailure();
         assertEquals( "check failure message", getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ),
                       getFailure().getReason() );
@@ -841,7 +837,7 @@ public class RepositoryConverterTest
 
         Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         checkSuccess();
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
@@ -879,7 +875,7 @@ public class RepositoryConverterTest
 
         try
         {
-            repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+            repositoryConverter.convert( artifact, targetRepository );
             fail( "Should have failed trying to convert within the same repository" );
         }
         catch ( RepositoryConversionException e )
index 940b6bde2e0aaa1a8884ff3dd08245ecda18c184..f2251a3383ef68cfbdd2714d6fcdfb30a276ec35 100644 (file)
@@ -27,7 +27,7 @@ import java.util.Map;
 /**
  * ManagedArtifact 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class ManagedArtifact
index 6cccfcd6a738c8ebf35fa75fa3c508f6562a1d7e..780b41a95d905e759ca12a7f30bdd5b9d1802fa5 100644 (file)
@@ -27,7 +27,7 @@ import java.util.List;
 /**
  * ManagedArtifactTypes - provides place to test an unknown artifact type.
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class ManagedArtifactTypes
index eacf1cbdfc5e0ec451134a36e205b597a02a09c4..f9036330cbb6f253f8bf09fc051adfd3c1655686 100644 (file)
@@ -24,7 +24,7 @@ import org.apache.maven.artifact.Artifact;
 /**
  * ManagedEjbArtifact - adds the ability to reference the ejb-client jar too. 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class ManagedEjbArtifact
index babb884207ad03b30bf88a9661bf03bf9b13245e..2665949259c9fcb2b7702894af2acee70a0c3efa 100644 (file)
@@ -25,7 +25,7 @@ import org.apache.maven.artifact.Artifact;
  * ManagedJavaArtifact - a ManagedArtifact with optional javadoc and source 
  * reference jars.
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class ManagedJavaArtifact
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java
new file mode 100644 (file)
index 0000000..b1dc839
--- /dev/null
@@ -0,0 +1,92 @@
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.InvalidArtifactRTException;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
+
+import java.io.File;
+import java.util.Collections;
+
+/**
+ * ArtifactHealthConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ *     role-hint="artifact-health"
+ *     instantiation-strategy="per-lookup"
+ */
+public class ArtifactHealthConsumer
+    extends GenericArtifactConsumer
+{
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
+    /**
+     * @plexus.requirement role-hint="health"
+     */
+    private ReportGroup health;
+
+    /**
+     * @plexus.requirement
+     */
+    private MavenProjectBuilder projectBuilder;
+
+    public void processArtifact( Artifact artifact, File file )
+    {
+        Model model = null;
+        try
+        {
+            Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact
+                .getArtifactId(), artifact.getVersion() );
+            MavenProject project = projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
+
+            model = project.getModel();
+        }
+        catch ( InvalidArtifactRTException e )
+        {
+            database.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
+        }
+        catch ( ProjectBuildingException e )
+        {
+            database.addWarning( artifact, null, null, "Error reading project model: " + e );
+        }
+        
+        database.remove( artifact );
+        health.processArtifact( artifact, model );
+    }
+
+    public void processArtifactBuildFailure( File path, String message )
+    {
+        /* do nothing here (yet) */
+        // TODO: store build failure into database?
+    }
+}
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java
new file mode 100644 (file)
index 0000000..6ec2dca
--- /dev/null
@@ -0,0 +1,94 @@
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.discoverer.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+
+/**
+ * IndexArtifactConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ *     role-hint="index-artifact"
+ *     instantiation-strategy="per-lookup"
+ */
+public class IndexArtifactConsumer
+    extends GenericArtifactConsumer
+{
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryArtifactIndexFactory indexFactory;
+    
+    /**
+     * @plexus.requirement role-hint="standard"
+     */
+    private RepositoryIndexRecordFactory recordFactory;
+
+    /**
+     * Configuration store.
+     *
+     * @plexus.requirement
+     */
+    private ArchivaConfiguration archivaConfiguration;
+
+    private RepositoryArtifactIndex index;
+
+    public boolean init( ArtifactRepository repository )
+    {
+        Configuration configuration = archivaConfiguration.getConfiguration();
+
+        File indexPath = new File( configuration.getIndexPath() );
+
+        index = indexFactory.createStandardIndex( indexPath );
+
+        return super.init( repository );
+    }
+
+    public void processArtifact( Artifact artifact, File file )
+    {
+        try
+        {
+            index.indexArtifact( artifact, recordFactory );
+        }
+        catch ( RepositoryIndexException e )
+        {
+            getLogger().warn( "Unable to index artifact " + artifact, e );
+        }
+    }
+
+    public void processArtifactBuildFailure( File path, String message )
+    {
+        // TODO: record the artifact build failure (e.g. into the reports database)
+
+    }
+}
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java
new file mode 100644 (file)
index 0000000..5eb297c
--- /dev/null
@@ -0,0 +1,60 @@
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.consumers.GenericRepositoryMetadataConsumer;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.io.File;
+
+/**
+ * RepositoryMetadataHealthConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ *     role-hint="metadata-health"
+ *     instantiation-strategy="per-lookup"
+ */
+public class RepositoryMetadataHealthConsumer
+    extends GenericRepositoryMetadataConsumer
+{
+    /**
+     * @plexus.requirement
+     */
+    private MetadataResultsDatabase database;
+
+    /**
+     * @plexus.requirement role-hint="health"
+     */
+    private ReportGroup health;
+
+    public void processRepositoryMetadata( RepositoryMetadata metadata, File file )
+    {
+        MetadataResults results = database.getMetadataResults( metadata );
+        database.clearResults( results );
+
+        health.processMetadata( metadata, repository );
+    }
+}
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java
deleted file mode 100644 (file)
index 7beb18a..0000000
+++ /dev/null
@@ -1,126 +0,0 @@
-package org.apache.maven.archiva.conversion;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.converter.RepositoryConverter;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStore;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- * @plexus.component
- * @todo turn this into a general conversion component and hide all this crap here.
- * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
- */
-public class DefaultLegacyRepositoryConverter
-    implements LegacyRepositoryConverter
-{
-    /**
-     * @plexus.requirement role-hint="legacy"
-     */
-    private ArtifactDiscoverer artifactDiscoverer;
-
-    /**
-     * @plexus.requirement role-hint="legacy"
-     */
-    private ArtifactRepositoryLayout legacyLayout;
-
-    /**
-     * @plexus.requirement role-hint="default"
-     */
-    private ArtifactRepositoryLayout defaultLayout;
-
-    /**
-     * @plexus.requirement
-     */
-    private ArtifactRepositoryFactory artifactRepositoryFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private RepositoryConverter repositoryConverter;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportingStore reportingStore;
-
-    /**
-     * @plexus.requirement role-hint="health"
-     */
-    private ReportGroup reportGroup;
-
-    public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
-                                         List blacklistedPatterns, boolean includeSnapshots )
-        throws RepositoryConversionException, DiscovererException
-    {
-        ArtifactRepository legacyRepository;
-
-        ArtifactRepository repository;
-
-        try
-        {
-            legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy",
-                                                                                   legacyRepositoryDirectory.toURI().toURL().toString(),
-                                                                                   legacyLayout, null, null );
-
-            repository = artifactRepositoryFactory.createArtifactRepository( "default",
-                                                                             repositoryDirectory.toURI().toURL().toString(),
-                                                                             defaultLayout, null, null );
-        }
-        catch ( MalformedURLException e )
-        {
-            throw new RepositoryConversionException( "Error convering legacy repository.", e );
-        }
-
-        ArtifactFilter filter =
-            includeSnapshots ? new AcceptAllArtifactFilter() : (ArtifactFilter) new SnapshotArtifactFilter();
-        List legacyArtifacts = artifactDiscoverer.discoverArtifacts( legacyRepository, blacklistedPatterns, filter );
-
-        ReportingDatabase reporter;
-        try
-        {
-            reporter = reportingStore.getReportsFromStore( repository, reportGroup );
-
-            repositoryConverter.convert( legacyArtifacts, repository, reporter );
-
-            reportingStore.storeReports( reporter, repository );
-        }
-        catch ( ReportingStoreException e )
-        {
-            throw new RepositoryConversionException( "Error convering legacy repository.", e );
-        }
-    }
-}
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java
deleted file mode 100644 (file)
index 8764214..0000000
+++ /dev/null
@@ -1,47 +0,0 @@
-package org.apache.maven.archiva.conversion;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- */
-public interface LegacyRepositoryConverter
-{
-    String ROLE = LegacyRepositoryConverter.class.getName();
-
-    /**
-     * Convert a legacy repository to a modern repository. This means a Maven 1.x repository
-     * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs.
-     *
-     * @param legacyRepositoryDirectory
-     * @param repositoryDirectory
-     * @throws org.apache.maven.archiva.converter.RepositoryConversionException
-     *
-     */
-    void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, List blacklistedPatterns,
-                                  boolean includeSnapshots )
-        throws RepositoryConversionException, DiscovererException;
-}
index 554fb34914afb35d07c12550a8551666c50165b0..f2ce88b050f354b93dcdd8fc0acf8ea61f9d3663 100644 (file)
@@ -31,7 +31,7 @@ import java.util.List;
 /**
  * ActiveManagedRepositories
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public interface ActiveManagedRepositories
index 128a91d66e2f1abc940c23da0146e7c531c1b30a..719ae09e6edea27b4087cc8b527d961132909ee9 100644 (file)
@@ -49,7 +49,7 @@ import java.util.List;
 /**
  * DefaultActiveManagedRepositories
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  * @plexus.component role="org.apache.maven.archiva.repositories.ActiveManagedRepositories"
  */
index 04ab088d4daae4c947d02e4e642563adb4022d5f..a3bd8e6b4af8607bf8a4302eee364f6cef1e324f 100644 (file)
@@ -24,8 +24,7 @@ import org.apache.maven.archiva.configuration.Configuration;
 import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
 import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
 import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
 import org.apache.maven.archiva.scheduler.task.RepositoryTask;
 import org.codehaus.plexus.logging.AbstractLogEnabled;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
@@ -61,14 +60,9 @@ public class DefaultRepositoryTaskScheduler
     private Scheduler scheduler;
 
     /**
-     * @plexus.requirement role-hint="indexer"
+     * @plexus.requirement role-hint="data-refresh"
      */
-    private TaskQueue indexerQueue;
-
-    /**
-     * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
-     */
-    private IndexerTaskExecutor indexerTaskExecutor;
+    private TaskQueue datarefreshQueue;
 
     /**
      * @plexus.requirement
@@ -135,7 +129,7 @@ public class DefaultRepositoryTaskScheduler
         JobDetail jobDetail = new JobDetail( jobName, DISCOVERER_GROUP, RepositoryTaskJob.class );
 
         JobDataMap dataMap = new JobDataMap();
-        dataMap.put( RepositoryTaskJob.TASK_QUEUE, indexerQueue );
+        dataMap.put( RepositoryTaskJob.TASK_QUEUE, datarefreshQueue );
         dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, RepositoryTask.QUEUE_POLICY_SKIP );
         jobDetail.setJobDataMap( dataMap );
 
@@ -199,14 +193,14 @@ public class DefaultRepositoryTaskScheduler
         }
     }
 
-    public void runIndexer()
+    public void runDataRefresh()
         throws org.apache.maven.archiva.scheduler.TaskExecutionException
     {
-        IndexerTask task = new IndexerTask();
-        task.setJobName( "INDEX_INIT" );
+        DataRefreshTask task = new DataRefreshTask();
+        task.setJobName( "DATA_REFRESH_INIT" );
         try
         {
-            indexerQueue.put( task );
+            datarefreshQueue.put( task );
         }
         catch ( TaskQueueException e )
         {
@@ -226,7 +220,7 @@ public class DefaultRepositoryTaskScheduler
             RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
             if ( !artifactIndex.exists() )
             {
-                runIndexer();
+                runDataRefresh();
             }
         }
         catch ( RepositoryIndexException e )
index dc3c26ace1344c7d43eb6480c1e250578bb3d1cd..4b712ad8a80b7b0384ef38974d935e57f54add97 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.maven.archiva.scheduler;
  * under the License.
  */
 
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
 import org.apache.maven.archiva.scheduler.task.RepositoryTask;
 import org.codehaus.plexus.scheduler.AbstractJob;
 import org.codehaus.plexus.taskqueue.TaskQueue;
@@ -53,23 +53,23 @@ public class RepositoryTaskJob
         JobDataMap dataMap = context.getJobDetail().getJobDataMap();
         setJobDataMap( dataMap );
 
-        TaskQueue indexerQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
+        TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
         String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
 
-        RepositoryTask task = new IndexerTask();
+        RepositoryTask task = new DataRefreshTask();
         task.setJobName( context.getJobDetail().getName() );
 
         try
         {
-            if ( indexerQueue.getQueueSnapshot().size() == 0 )
+            if ( taskQueue.getQueueSnapshot().size() == 0 )
             {
-                indexerQueue.put( task );
+                taskQueue.put( task );
             }
             else
             {
                 if ( RepositoryTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) )
                 {
-                    indexerQueue.put( task );
+                    taskQueue.put( task );
                 }
                 else if ( RepositoryTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) )
                 {
index c63556ff5f57af183f87ab109c7fa5faffbfabd4..8fef679529c129f18191e8ac0020c7e220d63694 100644 (file)
@@ -31,7 +31,7 @@ public interface RepositoryTaskScheduler
      */
     String ROLE = RepositoryTaskScheduler.class.getName();
 
-    void runIndexer()
+    void runDataRefresh()
         throws TaskExecutionException;
 
 }
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java
new file mode 100644 (file)
index 0000000..5fd32c7
--- /dev/null
@@ -0,0 +1,184 @@
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererConsumerFactory;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.taskqueue.Task;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * DataRefreshExecutor 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" 
+ *      role-hint="data-refresh"
+ */
+public class DataRefreshExecutor
+    extends AbstractLogEnabled
+    implements TaskExecutor
+{
+    /**
+     * Configuration store.
+     *
+     * @plexus.requirement
+     */
+    private ArchivaConfiguration archivaConfiguration;
+
+    /**
+     * @plexus.requirement
+     */
+    private ConfiguredRepositoryFactory repoFactory;
+
+    /**
+     * @plexus.configuration default-value="index-artifact"
+     */
+    private List consumerNames;
+
+    /**
+     * @plexus.requirement
+     */
+    private Discoverer discoverer;
+
+    /**
+     * @plexus.requirement
+     */
+    private DiscovererConsumerFactory consumerFactory;
+
+    public void executeTask( Task task )
+        throws TaskExecutionException
+    {
+        DataRefreshTask indexerTask = (DataRefreshTask) task;
+
+        getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
+
+        execute();
+    }
+
+    private String toHumanTimestamp( long timestamp )
+    {
+        SimpleDateFormat dateFormat = new SimpleDateFormat();
+        return dateFormat.format( new Date( timestamp ) );
+    }
+
+    public void execute()
+        throws TaskExecutionException
+    {
+        Configuration configuration = archivaConfiguration.getConfiguration();
+
+        List consumers = new ArrayList();
+
+        for ( Iterator it = consumerNames.iterator(); it.hasNext(); )
+        {
+            String name = (String) it.next();
+            try
+            {
+                DiscovererConsumer consumer = consumerFactory.createConsumer( name );
+                consumers.add( consumer );
+            }
+            catch ( DiscovererException e )
+            {
+                throw new TaskExecutionException( e.getMessage(), e );
+            }
+        }
+
+        long time = System.currentTimeMillis();
+
+        for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
+        {
+            RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
+
+            if ( !repositoryConfiguration.isIndexed() )
+            {
+                continue;
+            }
+
+            ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
+
+            List filteredConsumers = filterConsumers( consumers, repository );
+
+            DiscovererStatistics stats = discoverer.scanRepository( repository, filteredConsumers,
+                                                                    repositoryConfiguration.isIncludeSnapshots() );
+
+            getLogger().info( "----------------------------------------------------" );
+            getLogger().info( "Scan of Repository: " + repository.getId() );
+            getLogger().info( "   Started : " + toHumanTimestamp( stats.getTimestampStarted() ) );
+            getLogger().info( "   Finished: " + toHumanTimestamp( stats.getTimestampFinished() ) );
+            // TODO: pretty-print elapsed time.
+            getLogger().info( "   Duration: " + stats.getElapsedMilliseconds() + "ms" );
+            getLogger().info( "   Files   : " + stats.getFilesIncluded() );
+            getLogger().info( "   Consumed: " + stats.getFilesConsumed() );
+            getLogger().info( "   Skipped : " + stats.getFilesSkipped() );
+        }
+
+        time = System.currentTimeMillis() - time;
+
+        getLogger().info( "Finished data refresh process in " + time + "ms." );
+    }
+
+    /**
+     * Not all consumers work with all repositories.
+     * This will filter out those incompatible consumers based on the provided repository.
+     * 
+     * @param consumers the initial list of consumers.
+     * @param repository the repository to test consumer against.
+     * @return the filtered list of consumers.
+     */
+    private List filterConsumers( List consumers, ArtifactRepository repository )
+    {
+        List filtered = new ArrayList();
+
+        for ( Iterator it = consumers.iterator(); it.hasNext(); )
+        {
+            DiscovererConsumer consumer = (DiscovererConsumer) it.next();
+            if ( consumer.init( repository ) )
+            {
+                // Approved!
+                filtered.add( consumer );
+            }
+            else
+            {
+                getLogger().info( "Disabling consumer [" + consumer.getName() + "] for repository " + repository );
+            }
+        }
+
+        return filtered;
+    }
+
+}
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java
deleted file mode 100644 (file)
index 25d3905..0000000
+++ /dev/null
@@ -1,317 +0,0 @@
-package org.apache.maven.archiva.scheduler.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.archiva.configuration.RepositoryConfiguration;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.executor.ReportExecutor;
-import org.apache.maven.archiva.reporting.filter.ReportingMetadataFilter;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.taskqueue.Task;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Edwin Punzalan
- * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
- */
-public class IndexerTaskExecutor
-    extends AbstractLogEnabled
-    implements TaskExecutor
-{
-    /**
-     * Configuration store.
-     *
-     * @plexus.requirement
-     */
-    private ArchivaConfiguration archivaConfiguration;
-
-    /**
-     * @plexus.requirement
-     */
-    private RepositoryArtifactIndexFactory indexFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private ConfiguredRepositoryFactory repoFactory;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
-     */
-    private Map artifactDiscoverers;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
-     */
-    private Map metadataDiscoverers;
-
-    /**
-     * @plexus.requirement role-hint="standard"
-     */
-    private RepositoryIndexRecordFactory recordFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportExecutor reportExecutor;
-
-    /**
-     * @plexus.requirement role-hint="health"
-     */
-    private ReportGroup reportGroup;
-
-    private long lastIndexingTime = 0;
-
-    private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
-    public long getLastIndexingTime()
-    {
-        return lastIndexingTime;
-    }
-
-    public void executeTask( Task task )
-        throws TaskExecutionException
-    {
-        IndexerTask indexerTask = (IndexerTask) task;
-
-        getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
-
-        execute();
-    }
-
-    public void execute()
-        throws TaskExecutionException
-    {
-        Configuration configuration = archivaConfiguration.getConfiguration();
-
-        File indexPath = new File( configuration.getIndexPath() );
-
-        execute( configuration, indexPath );
-    }
-
-    public void executeNowIfNeeded()
-        throws TaskExecutionException
-    {
-        Configuration configuration = archivaConfiguration.getConfiguration();
-
-        File indexPath = new File( configuration.getIndexPath() );
-
-        try
-        {
-            RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
-            if ( !artifactIndex.exists() )
-            {
-                execute( configuration, indexPath );
-            }
-        }
-        catch ( RepositoryIndexException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-    }
-
-    private void execute( Configuration configuration, File indexPath )
-        throws TaskExecutionException
-    {
-        long time = System.currentTimeMillis();
-        getLogger().info( "Starting repository indexing process" );
-
-        RepositoryArtifactIndex index = indexFactory.createStandardIndex( indexPath );
-
-        try
-        {
-            Collection keys;
-            if ( index.exists() )
-            {
-                keys = index.getAllRecordKeys();
-            }
-            else
-            {
-                keys = Collections.EMPTY_LIST;
-            }
-
-            for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
-            {
-                RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
-                if ( repositoryConfiguration.isIndexed() )
-                {
-                    List blacklistedPatterns = new ArrayList();
-                    if ( repositoryConfiguration.getBlackListPatterns() != null )
-                    {
-                        blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() );
-                    }
-                    if ( configuration.getGlobalBlackListPatterns() != null )
-                    {
-                        blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() );
-                    }
-                    boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots();
-
-                    ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
-                    ReportingDatabase reporter = reportExecutor.getReportDatabase( repository, reportGroup );
-
-                    // keep original value in case there is another process under way
-                    long origStartTime = reporter.getStartTime();
-                    reporter.setStartTime( System.currentTimeMillis() );
-
-                    // Discovery process
-                    String layoutProperty = repositoryConfiguration.getLayout();
-                    ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
-                    AndArtifactFilter filter = new AndArtifactFilter();
-                    filter.add( new IndexRecordExistsArtifactFilter( keys ) );
-                    if ( !includeSnapshots )
-                    {
-                        filter.add( new SnapshotArtifactFilter() );
-                    }
-
-                    // Save some memory by not tracking paths we won't use
-                    // TODO: Plexus CDC should be able to inject this configuration
-                    discoverer.setTrackOmittedPaths( false );
-
-                    getLogger().info( "Searching repository " + repositoryConfiguration.getName() );
-                    List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
-                    if ( !artifacts.isEmpty() )
-                    {
-                        getLogger().info( "Discovered " + artifacts.size() + " unindexed artifacts" );
-
-                        // Work through these in batches, then flush the project cache.
-                        for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
-                        {
-                            int end = j + ARTIFACT_BUFFER_SIZE;
-                            List currentArtifacts =
-                                artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
-                            // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
-                            // run the reports. Done intermittently to avoid losing track of what is indexed since
-                            // that is what the filter is based on.
-                            reportExecutor.runArtifactReports( reportGroup, currentArtifacts, repository );
-
-                            index.indexArtifacts( currentArtifacts, recordFactory );
-
-                            // MRM-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
-                            // around that. TODO: remove when it is configurable
-                            flushProjectBuilderCacheHack();
-                        }
-                    }
-
-                    MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter );
-
-                    MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers
-                        .get( layoutProperty );
-                    List metadata =
-                        metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter );
-
-                    if ( !metadata.isEmpty() )
-                    {
-                        getLogger().info( "Discovered " + metadata.size() + " unprocessed metadata files" );
-
-                        // run the reports
-                        reportExecutor.runMetadataReports( reportGroup, metadata, repository );
-                    }
-
-                    reporter.setStartTime( origStartTime );
-                }
-            }
-        }
-        catch ( RepositoryIndexException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-        catch ( DiscovererException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-        catch ( ReportingStoreException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-
-        time = System.currentTimeMillis() - time;
-        lastIndexingTime = System.currentTimeMillis();
-        getLogger().info( "Finished repository indexing process in " + time + "ms" );
-    }
-
-    /**
-     * @todo remove when no longer needed (MRM-142)
-     * @plexus.requirement
-     */
-    private MavenProjectBuilder projectBuilder;
-
-    private void flushProjectBuilderCacheHack()
-    {
-        try
-        {
-            if ( projectBuilder != null )
-            {
-                getLogger().info( "projectBuilder is type " + projectBuilder.getClass().getName() );
-
-                java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
-                f.setAccessible( true );
-                Map cache = (Map) f.get( projectBuilder );
-                getLogger().info( "projectBuilder.raw is type " + cache.getClass().getName() );
-                cache.clear();
-
-                f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
-                f.setAccessible( true );
-                cache = (Map) f.get( projectBuilder );
-                getLogger().info( "projectBuilder.processed is type " + cache.getClass().getName() );
-                cache.clear();
-            }
-        }
-        catch ( NoSuchFieldException e )
-        {
-            throw new RuntimeException( e );
-        }
-        catch ( IllegalAccessException e )
-        {
-            throw new RuntimeException( e );
-        }
-    }
-}
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java
new file mode 100644 (file)
index 0000000..57d4b68
--- /dev/null
@@ -0,0 +1,41 @@
+package org.apache.maven.archiva.scheduler.task;
+
+/**
+ * DataRefreshTask - task for discovering changes in the repository 
+ * and updating all associated data. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DataRefreshTask
+    implements RepositoryTask
+{
+    private String jobName;
+
+    private String policy;
+
+    public String getJobName()
+    {
+        return jobName;
+    }
+
+    public String getQueuePolicy()
+    {
+        return policy;
+    }
+
+    public void setJobName( String jobName )
+    {
+        this.jobName = jobName;
+    }
+
+    public void setQueuePolicy( String policy )
+    {
+        this.policy = policy;
+    }
+
+    public long getMaxExecutionTime()
+    {
+        return 0;
+    }
+}
diff --git a/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java b/archiva-MRM-239/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java
deleted file mode 100644 (file)
index a4cd2f6..0000000
+++ /dev/null
@@ -1,60 +0,0 @@
-package org.apache.maven.archiva.scheduler.task;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Task for discovering changes in the repository and updating the index accordingly.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class IndexerTask
-    implements RepositoryTask
-{
-    private String jobName;
-
-    private String policy;
-
-    public long getMaxExecutionTime()
-    {
-        return 0;
-    }
-
-    public String getJobName()
-    {
-        return jobName;
-    }
-
-    public String getQueuePolicy()
-    {
-        return policy;
-    }
-
-    public void setQueuePolicy( String policy )
-    {
-        this.policy = policy;
-    }
-
-    public void setJobName( String jobName )
-    {
-        this.jobName = jobName;
-    }
-
-
-}
index bd13a7deb936243d6f6737b77908549e4d658bfb..748376a8abbaac5b421bef7a731674f91aff9590 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.maven.archiva;
  * under the License.
  */
 
-import org.apache.maven.archiva.conversion.LegacyRepositoryConverter;
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
 import org.codehaus.plexus.PlexusTestCase;
 
 import java.io.File;
@@ -39,6 +39,6 @@ public class LegacyRepositoryConverterTest
 
         LegacyRepositoryConverter rm = (LegacyRepositoryConverter) lookup( LegacyRepositoryConverter.ROLE );
 
-        rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, null, true );
+        rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, true );
     }
 }
index a535307e2b62863e71d9c679aaa9f67d949693fd..d46881aaef69241f9def0eb228d3c5e32212eb0e 100644 (file)
@@ -27,7 +27,7 @@ import org.codehaus.plexus.PlexusTestCase;
 /**
  * DefaultActiveManagedRepositoriesTest
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class DefaultActiveManagedRepositoriesTest
diff --git a/archiva-MRM-239/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java b/archiva-MRM-239/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java
new file mode 100644 (file)
index 0000000..47d1d5c
--- /dev/null
@@ -0,0 +1,75 @@
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.io.File;
+
+/**
+ * DataRefreshExecutorTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DataRefreshExecutorTest
+    extends PlexusTestCase
+{
+    private TaskExecutor taskExecutor;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "data-refresh" );
+
+        ArchivaConfiguration archivaConfiguration =
+            (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
+        Configuration configuration = archivaConfiguration.getConfiguration();
+
+        File indexPath = new File( configuration.getIndexPath() );
+        if ( indexPath.exists() )
+        {
+            FileUtils.deleteDirectory( indexPath );
+        }
+    }
+
+    public void testIndexer()
+        throws TaskExecutionException
+    {
+        taskExecutor.executeTask( new TestDataRefreshTask() );
+    }
+
+    class TestDataRefreshTask
+        extends DataRefreshTask
+    {
+        public String getJobName()
+        {
+            return "TestDataRefresh";
+        }
+    }
+}
diff --git a/archiva-MRM-239/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java b/archiva-MRM-239/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java
deleted file mode 100644 (file)
index 8729e0c..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-package org.apache.maven.archiva.scheduler.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.File;
-
-/**
- * IndexerTaskExecutorTest
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class IndexerTaskExecutorTest
-    extends PlexusTestCase
-{
-    private TaskExecutor taskExecutor;
-
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "indexer" );
-
-        ArchivaConfiguration archivaConfiguration =
-            (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
-        Configuration configuration = archivaConfiguration.getConfiguration();
-
-        File indexPath = new File( configuration.getIndexPath() );
-        if ( indexPath.exists() )
-        {
-            FileUtils.deleteDirectory( indexPath );
-        }
-    }
-
-    public void testIndexer()
-        throws TaskExecutionException
-    {
-        taskExecutor.executeTask( new TestIndexerTask() );
-    }
-
-    class TestIndexerTask
-        extends IndexerTask
-    {
-        public String getJobName()
-        {
-            return "TestIndexer";
-        }
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java
deleted file mode 100644 (file)
index 0f4b7bf..0000000
+++ /dev/null
@@ -1,115 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Base class for artifact discoverers.
- *
- * @author John Casey
- * @author Brett Porter
- */
-public abstract class AbstractArtifactDiscoverer
-    extends AbstractDiscoverer
-    implements ArtifactDiscoverer
-{
-    /**
-     * Standard patterns to exclude from discovery as they are not artifacts.
-     */
-    private static final String[] STANDARD_DISCOVERY_EXCLUDES = {"bin/**", "reports/**", ".index", ".reports/**",
-        ".maven/**", "**/*.md5", "**/*.MD5", "**/*.sha1", "**/*.SHA1", "**/*snapshot-version", "*/website/**",
-        "*/licenses/**", "*/licences/**", "**/.htaccess", "**/*.html", "**/*.asc", "**/*.txt", "**/*.xml", "**/README*",
-        "**/CHANGELOG*", "**/KEYS*"};
-
-    private List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns )
-    {
-        return scanForArtifactPaths( repositoryBase, blacklistedPatterns, null, STANDARD_DISCOVERY_EXCLUDES );
-    }
-
-    public List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter )
-        throws DiscovererException
-    {
-        if ( !"file".equals( repository.getProtocol() ) )
-        {
-            throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
-        }
-
-        File repositoryBase = new File( repository.getBasedir() );
-
-        List artifacts = new ArrayList();
-
-        List artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns );
-
-        for ( Iterator i = artifactPaths.iterator(); i.hasNext(); )
-        {
-            String path = (String) i.next();
-
-            try
-            {
-                Artifact artifact = buildArtifactFromPath( path, repository );
-
-                if ( filter.include( artifact ) )
-                {
-                    artifacts.add( artifact );
-                }
-                else
-                {
-                    addExcludedPath( path, "Omitted by filter" );
-                }
-            }
-            catch ( DiscovererException e )
-            {
-                addKickedOutPath( path, e.getMessage() );
-            }
-        }
-
-        return artifacts;
-    }
-
-    /**
-     * Returns an artifact object that is represented by the specified path in a repository
-     *
-     * @param path       The path that is pointing to an artifact
-     * @param repository The repository of the artifact
-     * @return Artifact
-     * @throws DiscovererException when the specified path does correspond to an artifact
-     */
-    public Artifact buildArtifactFromPath( String path, ArtifactRepository repository )
-        throws DiscovererException
-    {
-        Artifact artifact = buildArtifact( path );
-
-        if ( artifact != null )
-        {
-            artifact.setRepository( repository );
-            artifact.setFile( new File( repository.getBasedir(), path ) );
-        }
-
-        return artifact;
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java
deleted file mode 100644 (file)
index 7e0ee4f..0000000
+++ /dev/null
@@ -1,158 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.DirectoryScanner;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Base class for the artifact and metadata discoverers.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractDiscoverer
-    extends AbstractLogEnabled
-    implements Discoverer
-{
-    private List kickedOutPaths = new ArrayList();
-
-    /**
-     * @plexus.requirement
-     */
-    protected ArtifactFactory artifactFactory;
-
-    private static final String[] EMPTY_STRING_ARRAY = new String[0];
-
-    private List excludedPaths = new ArrayList();
-
-    /**
-     * @plexus.configuration default-value="true"
-     */
-    private boolean trackOmittedPaths;
-
-    /**
-     * Add a path to the list of files that were kicked out due to being invalid.
-     *
-     * @param path   the path to add
-     * @param reason the reason why the path is being kicked out
-     */
-    protected void addKickedOutPath( String path, String reason )
-    {
-        if ( trackOmittedPaths )
-        {
-            kickedOutPaths.add( new DiscovererPath( path, reason ) );
-        }
-    }
-
-    /**
-     * Add a path to the list of files that were excluded.
-     *
-     * @param path   the path to add
-     * @param reason the reason why the path is excluded
-     */
-    protected void addExcludedPath( String path, String reason )
-    {
-        excludedPaths.add( new DiscovererPath( path, reason ) );
-    }
-
-    /**
-     * Returns an iterator for the list if DiscovererPaths that were found to not represent a searched object
-     *
-     * @return Iterator for the DiscovererPath List
-     */
-    public Iterator getKickedOutPathsIterator()
-    {
-        assert trackOmittedPaths;
-        return kickedOutPaths.iterator();
-    }
-
-    protected List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns, String[] includes,
-                                         String[] excludes )
-    {
-        List allExcludes = new ArrayList();
-        allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
-        if ( excludes != null )
-        {
-            allExcludes.addAll( Arrays.asList( excludes ) );
-        }
-        if ( blacklistedPatterns != null )
-        {
-            allExcludes.addAll( blacklistedPatterns );
-        }
-
-        DirectoryScanner scanner = new DirectoryScanner();
-
-        scanner.setBasedir( repositoryBase );
-
-        if ( includes != null )
-        {
-            scanner.setIncludes( includes );
-        }
-        scanner.setExcludes( (String[]) allExcludes.toArray( EMPTY_STRING_ARRAY ) );
-
-        // TODO: Correct for extremely large repositories (artifact counts over 200,000 entries)
-        scanner.scan();
-
-        if ( trackOmittedPaths )
-        {
-            for ( Iterator files = Arrays.asList( scanner.getExcludedFiles() ).iterator(); files.hasNext(); )
-            {
-                String path = files.next().toString();
-
-                excludedPaths.add( new DiscovererPath( path, "Artifact was in the specified list of exclusions" ) );
-            }
-        }
-
-        // TODO: this could be a part of the scanner
-        List includedPaths = new ArrayList();
-        for ( Iterator files = Arrays.asList( scanner.getIncludedFiles() ).iterator(); files.hasNext(); )
-        {
-            String path = files.next().toString();
-
-            includedPaths.add( path );
-        }
-
-        return includedPaths;
-    }
-
-    /**
-     * Returns an iterator for the list if DiscovererPaths that were not processed because they are explicitly excluded
-     *
-     * @return Iterator for the DiscovererPath List
-     */
-    public Iterator getExcludedPathsIterator()
-    {
-        assert trackOmittedPaths;
-        return excludedPaths.iterator();
-    }
-
-    public void setTrackOmittedPaths( boolean trackOmittedPaths )
-    {
-        this.trackOmittedPaths = trackOmittedPaths;
-    }
-}
index 50873b284ea1b9714747b175383d3ab165f39493..41270b6801dfa69fea28d5795ca9e696228fdb16 100644 (file)
@@ -20,39 +20,19 @@ package org.apache.maven.archiva.discoverer;
  */
 
 import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.List;
 
 /**
  * Interface for implementation that can discover artifacts within a repository.
  *
  * @author John Casey
  * @author Brett Porter
- * @todo do we want blacklisted patterns in another form? Part of the object construction?
- * @todo should includeSnapshots be configuration on the component? If not, should the methods be changed to include alternates for both possibilities (discoverReleaseArtifacts, discoverReleaseAndSnapshotArtifacts)?
- * @todo instead of a returned list, should a listener be passed in?
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  */
 public interface ArtifactDiscoverer
     extends Discoverer
 {
     String ROLE = ArtifactDiscoverer.class.getName();
 
-    /**
-     * Discover artifacts in the repository. Only artifacts added since the last attempt at discovery will be found.
-     * This process guarantees never to miss an artifact, however it is possible that an artifact will be received twice
-     * consecutively even if unchanged, so any users of this list must handle such a situation gracefully.
-     *
-     * @param repository          the location of the repository
-     * @param blacklistedPatterns pattern that lists any files to prevent from being included when scanning
-     * @param filter              filter for artifacts to include in the discovered list
-     * @return the list of artifacts discovered
-     * @throws DiscovererException if there was an unrecoverable problem discovering artifacts or recording progress
-     */
-    List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter )
-        throws DiscovererException;
-
     /**
      * Build an artifact from a path in the repository
      *
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java
deleted file mode 100644 (file)
index f085602..0000000
+++ /dev/null
@@ -1,200 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.artifact.Artifact;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the new repository layout (Maven 2.0+).
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="default"
- */
-public class DefaultArtifactDiscoverer
-    extends AbstractArtifactDiscoverer
-{
-    /**
-     * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
-     */
-    public Artifact buildArtifact( String path )
-        throws DiscovererException
-    {
-        List pathParts = new ArrayList();
-        StringTokenizer st = new StringTokenizer( path, "/\\" );
-        while ( st.hasMoreTokens() )
-        {
-            pathParts.add( st.nextToken() );
-        }
-
-        Collections.reverse( pathParts );
-
-        Artifact artifact;
-        if ( pathParts.size() >= 4 )
-        {
-            // maven 2.x path
-
-            // the actual artifact filename.
-            String filename = (String) pathParts.remove( 0 );
-
-            // the next one is the version.
-            String version = (String) pathParts.remove( 0 );
-
-            // the next one is the artifactId.
-            String artifactId = (String) pathParts.remove( 0 );
-
-            // the remaining are the groupId.
-            Collections.reverse( pathParts );
-            String groupId = StringUtils.join( pathParts.iterator(), "." );
-
-            String remainingFilename = filename;
-            if ( remainingFilename.startsWith( artifactId + "-" ) )
-            {
-                remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
-
-                String classifier = null;
-
-                // TODO: use artifact handler, share with legacy discoverer
-                String type;
-                if ( remainingFilename.endsWith( ".tar.gz" ) )
-                {
-                    type = "distribution-tgz";
-                    remainingFilename =
-                        remainingFilename.substring( 0, remainingFilename.length() - ".tar.gz".length() );
-                }
-                else if ( remainingFilename.endsWith( ".zip" ) )
-                {
-                    type = "distribution-zip";
-                    remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
-                }
-                else if ( remainingFilename.endsWith( "-test-sources.jar" ) )
-                {
-                    type = "java-source";
-                    classifier = "test-sources";
-                    remainingFilename =
-                        remainingFilename.substring( 0, remainingFilename.length() - "-test-sources.jar".length() );
-                }
-                else if ( remainingFilename.endsWith( "-sources.jar" ) )
-                {
-                    type = "java-source";
-                    classifier = "sources";
-                    remainingFilename =
-                        remainingFilename.substring( 0, remainingFilename.length() - "-sources.jar".length() );
-                }
-                else
-                {
-                    int index = remainingFilename.lastIndexOf( "." );
-                    if ( index >= 0 )
-                    {
-                        type = remainingFilename.substring( index + 1 );
-                        remainingFilename = remainingFilename.substring( 0, index );
-                    }
-                    else
-                    {
-                        throw new DiscovererException( "Path filename does not have an extension" );
-                    }
-                }
-
-                Artifact result;
-                if ( classifier == null )
-                {
-                    result =
-                        artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
-                }
-                else
-                {
-                    result =
-                        artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
-                }
-
-                if ( result.isSnapshot() )
-                {
-                    // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
-                    int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
-                    if ( classifierIndex >= 0 )
-                    {
-                        classifier = remainingFilename.substring( classifierIndex + 1 );
-                        remainingFilename = remainingFilename.substring( 0, classifierIndex );
-                        result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
-                                                                               type, classifier );
-                    }
-                    else
-                    {
-                        result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
-                                                                 Artifact.SCOPE_RUNTIME, type );
-                    }
-
-                    // poor encapsulation requires we do this to populate base version
-                    if ( !result.isSnapshot() )
-                    {
-                        throw new DiscovererException( "Failed to create a snapshot artifact: " + result );
-                    }
-                    else if ( !result.getBaseVersion().equals( version ) )
-                    {
-                        throw new DiscovererException(
-                            "Built snapshot artifact base version does not match path version: " + result +
-                                "; should have been version: " + version );
-                    }
-                    else
-                    {
-                        artifact = result;
-                    }
-                }
-                else if ( !remainingFilename.startsWith( version ) )
-                {
-                    throw new DiscovererException( "Built artifact version does not match path version" );
-                }
-                else if ( !remainingFilename.equals( version ) )
-                {
-                    if ( remainingFilename.charAt( version.length() ) == '-' )
-                    {
-                        classifier = remainingFilename.substring( version.length() + 1 );
-                        artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
-                                                                                 classifier );
-                    }
-                    else
-                    {
-                        throw new DiscovererException( "Path version does not corresspond to an artifact version" );
-                    }
-                }
-                else
-                {
-                    artifact = result;
-                }
-            }
-            else
-            {
-                throw new DiscovererException( "Path filename does not correspond to an artifact" );
-            }
-        }
-        else
-        {
-            throw new DiscovererException( "Path is too short to build an artifact from" );
-        }
-
-        return artifact;
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java
new file mode 100644 (file)
index 0000000..9692595
--- /dev/null
@@ -0,0 +1,168 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.DirectoryWalker;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Discoverer Implementation.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @plexus.component role="org.apache.maven.archiva.discoverer.Discoverer"
+ *    role-hint="default"
+ */
+public class DefaultDiscoverer
+    extends AbstractLogEnabled
+    implements Discoverer
+{
+    /**
+     * Standard patterns to exclude from discovery as they are usually noise.
+     */
+    private static final String[] STANDARD_DISCOVERY_EXCLUDES = {
+        "bin/**",
+        "reports/**",
+        ".index",
+        ".reports/**",
+        ".maven/**",
+        "**/*snapshot-version",
+        "*/website/**",
+        "*/licences/**",
+        "**/.htaccess",
+        "**/*.html",
+        "**/*.txt",
+        "**/README*",
+        "**/CHANGELOG*",
+        "**/KEYS*" };
+
+    public DefaultDiscoverer()
+    {
+    }
+
+    public DiscovererStatistics scanRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots )
+    {
+        return walkRepository( repository, consumers, includeSnapshots, true );
+    }
+
+    public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots )
+    {
+        return walkRepository( repository, consumers, includeSnapshots, false );
+    }
+
+    private DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers,
+                                                 boolean includeSnapshots, boolean checkLastModified )
+    {
+        // Sanity Check
+
+        if ( repository == null )
+        {
+            throw new IllegalArgumentException( "Unable to operate on a null repository." );
+        }
+
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            throw new UnsupportedOperationException( "Only filesystem repositories are supported." );
+        }
+
+        File repositoryBase = new File( repository.getBasedir() );
+
+        if ( !repositoryBase.exists() )
+        {
+            throw new UnsupportedOperationException( "Unable to scan a repository, directory "
+                + repositoryBase.getAbsolutePath() + " does not exist." );
+        }
+
+        if ( !repositoryBase.isDirectory() )
+        {
+            throw new UnsupportedOperationException( "Unable to scan a repository, path "
+                + repositoryBase.getAbsolutePath() + " is not a directory." );
+        }
+
+        // Setup Includes / Excludes.
+
+        List allExcludes = new ArrayList();
+        List allIncludes = new ArrayList();
+
+        // Exclude all of the SCM patterns.
+        allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
+
+        // Exclude all of the archiva noise patterns.
+        allExcludes.addAll( Arrays.asList( STANDARD_DISCOVERY_EXCLUDES ) );
+
+        if ( !includeSnapshots )
+        {
+            allExcludes.add( "**/*-SNAPSHOT*" );
+        }
+
+        Iterator it = consumers.iterator();
+        while ( it.hasNext() )
+        {
+            DiscovererConsumer consumer = (DiscovererConsumer) it.next();
+
+            // TODO Disabled, until I can find a better way to do this that doesn't clobber other consumers. - joakime
+            // addUniqueElements( consumer.getExcludePatterns(), allExcludes );
+            addUniqueElements( consumer.getIncludePatterns(), allIncludes );
+        }
+
+        // Setup Directory Walker
+
+        DirectoryWalker dirWalker = new DirectoryWalker();
+
+        dirWalker.setBaseDir( repositoryBase );
+
+        dirWalker.setIncludes( allIncludes );
+        dirWalker.setExcludes( allExcludes );
+
+        // Setup the Scan Instance
+
+        RepositoryScanner repoScanner = new RepositoryScanner( repository, consumers );
+        repoScanner.setCheckLastModified( checkLastModified );
+
+        repoScanner.setLogger( getLogger() );
+        dirWalker.addDirectoryWalkListener( repoScanner );
+
+        // Execute scan.
+        dirWalker.scan();
+
+        return repoScanner.getStatistics();
+    }
+
+    private void addUniqueElements( List fromList, List toList )
+    {
+        Iterator itFrom = fromList.iterator();
+        while ( itFrom.hasNext() )
+        {
+            Object o = itFrom.next();
+            if ( !toList.contains( o ) )
+            {
+                toList.add( o );
+            }
+        }
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java
deleted file mode 100644 (file)
index dd18c5d..0000000
+++ /dev/null
@@ -1,222 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * This class gets all the paths that contain the metadata files.
- *
- * @plexus.component role="org.apache.maven.archiva.discoverer.MetadataDiscoverer" role-hint="default"
- */
-public class DefaultMetadataDiscoverer
-    extends AbstractDiscoverer
-    implements MetadataDiscoverer
-{
-    /**
-     * Standard patterns to include in discovery of metadata files.
-     *
-     * @todo Note that only the remote format is supported at this time: you cannot search local repository metadata due
-     * to the way it is later loaded in the searchers. Review code using pathOfRemoteMetadata. IS there any value in
-     * searching the local metadata in the first place though?
-     */
-    private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"};
-
-    public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter filter )
-        throws DiscovererException
-    {
-        if ( !"file".equals( repository.getProtocol() ) )
-        {
-            throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
-        }
-
-        List metadataFiles = new ArrayList();
-        List metadataPaths = scanForArtifactPaths( new File( repository.getBasedir() ), blacklistedPatterns,
-                                                   STANDARD_DISCOVERY_INCLUDES, null );
-
-        for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
-        {
-            String metadataPath = (String) i.next();
-            try
-            {
-                RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), metadataPath );
-                File f = new File( repository.getBasedir(), metadataPath );
-                if ( filter.include( metadata, f.lastModified() ) )
-                {
-                    metadataFiles.add( metadata );
-                }
-                else
-                {
-                    addExcludedPath( metadataPath, "Metadata excluded by filter" );
-                }
-            }
-            catch ( DiscovererException e )
-            {
-                addKickedOutPath( metadataPath, e.getMessage() );
-            }
-        }
-
-        return metadataFiles;
-    }
-
-    public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
-        throws DiscovererException
-    {
-        return discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
-    }
-
-    private RepositoryMetadata buildMetadata( String repo, String metadataPath )
-        throws DiscovererException
-    {
-        Metadata m;
-        File f = new File( repo, metadataPath );
-        try
-        {
-            Reader reader = new FileReader( f );
-            MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
-
-            m = metadataReader.read( reader );
-        }
-        catch ( XmlPullParserException e )
-        {
-            throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
-        }
-        catch ( IOException e )
-        {
-            throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
-        }
-
-        RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
-
-        if ( repositoryMetadata == null )
-        {
-            throw new DiscovererException( "Unable to build a repository metadata from path" );
-        }
-
-        return repositoryMetadata;
-    }
-
-    /**
-     * Builds a RepositoryMetadata object from a Metadata object and its path.
-     *
-     * @param m            Metadata
-     * @param metadataPath path
-     * @return RepositoryMetadata if the parameters represent one; null if not
-     * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc)
-     */
-    private RepositoryMetadata buildMetadata( Metadata m, String metadataPath )
-    {
-        String metaGroupId = m.getGroupId();
-        String metaArtifactId = m.getArtifactId();
-        String metaVersion = m.getVersion();
-
-        // check if the groupId, artifactId and version is in the
-        // metadataPath
-        // parse the path, in reverse order
-        List pathParts = new ArrayList();
-        StringTokenizer st = new StringTokenizer( metadataPath, "/\\" );
-        while ( st.hasMoreTokens() )
-        {
-            pathParts.add( st.nextToken() );
-        }
-
-        Collections.reverse( pathParts );
-        // remove the metadata file
-        pathParts.remove( 0 );
-        Iterator it = pathParts.iterator();
-        String tmpDir = (String) it.next();
-
-        Artifact artifact = null;
-        if ( StringUtils.isNotEmpty( metaVersion ) )
-        {
-            artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
-        }
-
-        // snapshotMetadata
-        RepositoryMetadata metadata = null;
-        if ( tmpDir != null && tmpDir.equals( metaVersion ) )
-        {
-            if ( artifact != null )
-            {
-                metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-            }
-        }
-        else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
-        {
-            // artifactMetadata
-            if ( artifact != null )
-            {
-                metadata = new ArtifactRepositoryMetadata( artifact );
-            }
-            else
-            {
-                artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
-                metadata = new ArtifactRepositoryMetadata( artifact );
-            }
-        }
-        else
-        {
-            String groupDir = "";
-            int ctr = 0;
-            for ( it = pathParts.iterator(); it.hasNext(); )
-            {
-                String path = (String) it.next();
-                if ( ctr == 0 )
-                {
-                    groupDir = path;
-                }
-                else
-                {
-                    groupDir = path + "." + groupDir;
-                }
-                ctr++;
-            }
-
-            // groupMetadata
-            if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
-            {
-                metadata = new GroupRepositoryMetadata( metaGroupId );
-            }
-        }
-
-        return metadata;
-    }
-}
index 157557bf2a234f64f54eadb54dca39dd92097604..c6e1071259a4a23e9ab1ebe3fe6964d95ef77fd3 100644 (file)
@@ -19,26 +19,37 @@ package org.apache.maven.archiva.discoverer;
  * under the License.
  */
 
-import java.util.Iterator;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.List;
 
 /**
- * @author Edwin Punzalan
+ * Discoverer - generic discoverer of content in an ArtifactRepository. 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
  */
 public interface Discoverer
 {
     /**
-     * Get the list of paths kicked out during the discovery process.
-     *
-     * @return the paths as Strings.
+     * Scan the repository for changes.
+     * Report changes to the appropriate Consumer.
+     * 
+     * @param repository the repository to scan.
+     * @param consumers the list of consumers to notify of discovered content.
+     * @param includeSnapshots true to include snapshots in the scanning of this repository.
+     * @return the statistics for this scan.
      */
-    Iterator getKickedOutPathsIterator();
-
+    public DiscovererStatistics scanRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots );
+    
     /**
-     * Get the list of paths excluded during the discovery process.
-     *
-     * @return the paths as Strings.
+     * Walk the entire repository, regardless of change.
+     * Report changes to the appropriate Consumer.
+     * 
+     * @param repository the repository to walk.
+     * @param consumers the list of consumers to notify of discovered content.
+     * @param includeSnapshots true to include snapshots in the walking of this repository.
+     * @return the statistics for this scan.
      */
-    Iterator getExcludedPathsIterator();
-
-    void setTrackOmittedPaths( boolean trackOmittedPaths );
+    public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots );
 }
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererConsumer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererConsumer.java
new file mode 100644 (file)
index 0000000..e624cf8
--- /dev/null
@@ -0,0 +1,46 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * DiscovererConsumer - a consumer of files encountered during repository discovery; supplies
+ * include/exclude patterns and processes each matching file.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public interface DiscovererConsumer
+{
+    public static final String ROLE = DiscovererConsumer.class.getName();
+
+    public String getName();
+    
+    public boolean init( ArtifactRepository repository );
+    
+    public List getExcludePatterns();
+    
+    public List getIncludePatterns();
+
+    public void processFile( File file ) throws DiscovererException;
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererConsumerFactory.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererConsumerFactory.java
new file mode 100644 (file)
index 0000000..5f37d0f
--- /dev/null
@@ -0,0 +1,42 @@
+package org.apache.maven.archiva.discoverer;
+
+import org.codehaus.plexus.PlexusConstants;
+import org.codehaus.plexus.PlexusContainer;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+import org.codehaus.plexus.context.Context;
+import org.codehaus.plexus.context.ContextException;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable;
+
+/**
+ * DiscovererConsumerFactory - factory for consumers.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumerFactory"
+ */
+public class DiscovererConsumerFactory
+implements Contextualizable
+{
+    private PlexusContainer container;
+    
+    public DiscovererConsumer createConsumer( String name ) throws DiscovererException
+    {
+        DiscovererConsumer consumer;
+        try
+        {
+            consumer = (DiscovererConsumer) container.lookup(DiscovererConsumer.ROLE, name);
+        }
+        catch ( ComponentLookupException e )
+        {
+            throw new DiscovererException("Unable to create consumer [" + name + "]: " + e.getMessage(), e);
+        }
+        
+        return consumer;
+    }
+
+    public void contextualize( Context context )
+        throws ContextException
+    {
+        container = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY );
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java
deleted file mode 100644 (file)
index b893078..0000000
+++ /dev/null
@@ -1,52 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class DiscovererPath
-{
-    /**
-     * The path discovered.
-     */
-    private final String path;
-
-    /**
-     * A comment about why the path is being processed.
-     */
-    private final String comment;
-
-    public DiscovererPath( String path, String comment )
-    {
-        this.path = path;
-        this.comment = comment;
-    }
-
-    public String getPath()
-    {
-        return path;
-    }
-
-    public String getComment()
-    {
-        return comment;
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java
new file mode 100644 (file)
index 0000000..37ad035
--- /dev/null
@@ -0,0 +1,182 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.math.NumberUtils;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.util.IOUtil;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.Properties;
+
+/**
+ * DiscovererStatistics 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DiscovererStatistics
+{
+    public static final String STATS_FILENAME = ".stats";
+
+    private static final String PROP_FILES_CONSUMED = "scan.consumed.files";
+
+    private static final String PROP_FILES_INCLUDED = "scan.included.files";
+
+    private static final String PROP_FILES_SKIPPED = "scan.skipped.files";
+
+    private static final String PROP_TIMESTAMP_STARTED = "scan.started.timestamp";
+
+    private static final String PROP_TIMESTAMP_FINISHED = "scan.finished.timestamp";
+
+    protected long timestampStarted = 0;
+
+    protected long timestampFinished = 0;
+
+    protected long filesIncluded = 0;
+
+    protected long filesConsumed = 0;
+
+    protected long filesSkipped = 0;
+
+    private ArtifactRepository repository;
+
+    public DiscovererStatistics( ArtifactRepository repository )
+    {
+        this.repository = repository;
+        load();
+    }
+
+    public void load()
+    {
+        File repositoryBase = new File( this.repository.getBasedir() );
+
+        File scanProperties = new File( repositoryBase, STATS_FILENAME );
+        FileInputStream fis = null;
+        try
+        {
+            Properties props = new Properties();
+            fis = new FileInputStream( scanProperties );
+            props.load( fis );
+
+            timestampFinished = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_FINISHED ), 0 );
+            timestampStarted = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_STARTED ), 0 );
+            filesIncluded = NumberUtils.toLong( props.getProperty( PROP_FILES_INCLUDED ), 0 );
+            filesConsumed = NumberUtils.toLong( props.getProperty( PROP_FILES_CONSUMED ), 0 );
+            filesSkipped = NumberUtils.toLong( props.getProperty( PROP_FILES_SKIPPED ), 0 );
+        }
+        catch ( IOException e )
+        {
+            reset();
+        }
+        finally
+        {
+            IOUtil.close( fis );
+        }
+    }
+
+    public void save()
+        throws DiscovererException
+    {
+        Properties props = new Properties();
+        props.setProperty( PROP_TIMESTAMP_FINISHED, String.valueOf( timestampFinished ) );
+        props.setProperty( PROP_TIMESTAMP_STARTED, String.valueOf( timestampStarted ) );
+        props.setProperty( PROP_FILES_INCLUDED, String.valueOf( filesIncluded ) );
+        props.setProperty( PROP_FILES_CONSUMED, String.valueOf( filesConsumed ) );
+        props.setProperty( PROP_FILES_SKIPPED, String.valueOf( filesSkipped ) );
+
+        File repositoryBase = new File( this.repository.getBasedir() );
+        File statsFile = new File( repositoryBase, STATS_FILENAME );
+
+        FileOutputStream fos = null;
+        try
+        {
+            fos = new FileOutputStream( statsFile );
+            props.store( fos, "Last Scan Information, managed by Archiva. DO NOT EDIT" );
+            fos.flush();
+        }
+        catch ( IOException e )
+        {
+            throw new DiscovererException( "Unable to write scan stats to file " + statsFile.getAbsolutePath() + ": "
+                + e.getMessage(), e );
+        }
+        finally
+        {
+            IOUtil.close( fos );
+        }
+    }
+
+    public void reset()
+    {
+        timestampStarted = 0;
+        timestampFinished = 0;
+        filesIncluded = 0;
+        filesConsumed = 0;
+        filesSkipped = 0;
+    }
+
+    public long getElapsedMilliseconds()
+    {
+        return timestampFinished - timestampStarted;
+    }
+
+    public long getFilesConsumed()
+    {
+        return filesConsumed;
+    }
+
+    public long getFilesIncluded()
+    {
+        return filesIncluded;
+    }
+
+    public ArtifactRepository getRepository()
+    {
+        return repository;
+    }
+
+    public long getTimestampFinished()
+    {
+        return timestampFinished;
+    }
+
+    public long getTimestampStarted()
+    {
+        return timestampStarted;
+    }
+
+    public long getFilesSkipped()
+    {
+        return filesSkipped;
+    }
+
+    public void setTimestampFinished( long timestampFinished )
+    {
+        this.timestampFinished = timestampFinished;
+    }
+
+    public void setTimestampStarted( long timestampStarted )
+    {
+        this.timestampStarted = timestampStarted;
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java
deleted file mode 100644 (file)
index 13b8080..0000000
+++ /dev/null
@@ -1,291 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the legacy repository layout (Maven 1.x).
- * Method used to build an artifact object using a relative path from a repository base directory.  An artifactId
- * having the words "DEV", "PRE", "RC", "ALPHA", "BETA", "DEBUG", "UNOFFICIAL", "CURRENT", "LATEST", "FCS",
- * "RELEASE", "NIGHTLY", "SNAPSHOT" and "TEST" (not case-sensitive) will most likely make this method fail as
- * they are reserved for version usage.
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="legacy"
- */
-public class LegacyArtifactDiscoverer
-    extends AbstractArtifactDiscoverer
-{
-    /**
-     * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
-     */
-    public Artifact buildArtifact( String path )
-        throws DiscovererException
-    {
-        StringTokenizer tokens = new StringTokenizer( path, "/\\" );
-
-        Artifact result;
-
-        int numberOfTokens = tokens.countTokens();
-
-        if ( numberOfTokens == 3 )
-        {
-            String groupId = tokens.nextToken();
-
-            String type = tokens.nextToken();
-
-            if ( type.endsWith( "s" ) )
-            {
-                type = type.substring( 0, type.length() - 1 );
-
-                // contains artifactId, version, classifier, and extension.
-                String avceGlob = tokens.nextToken();
-
-                //noinspection CollectionDeclaredAsConcreteClass
-                LinkedList avceTokenList = new LinkedList();
-
-                StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
-                while ( avceTokenizer.hasMoreTokens() )
-                {
-                    avceTokenList.addLast( avceTokenizer.nextToken() );
-                }
-
-                String lastAvceToken = (String) avceTokenList.removeLast();
-
-                // TODO: share with other discoverer, use artifact handlers instead
-                if ( lastAvceToken.endsWith( ".tar.gz" ) )
-                {
-                    type = "distribution-tgz";
-
-                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
-
-                    avceTokenList.addLast( lastAvceToken );
-                }
-                else if ( lastAvceToken.endsWith( "sources.jar" ) )
-                {
-                    type = "java-source";
-
-                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
-                    avceTokenList.addLast( lastAvceToken );
-                }
-                else if ( lastAvceToken.endsWith( "javadoc.jar" ) )
-                {
-                    type = "javadoc.jar";
-
-                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
-                    avceTokenList.addLast( lastAvceToken );
-                }
-                else if ( lastAvceToken.endsWith( ".zip" ) )
-                {
-                    type = "distribution-zip";
-
-                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
-
-                    avceTokenList.addLast( lastAvceToken );
-                }
-                else
-                {
-                    int extPos = lastAvceToken.lastIndexOf( '.' );
-
-                    if ( extPos > 0 )
-                    {
-                        String ext = lastAvceToken.substring( extPos + 1 );
-                        if ( type.equals( ext ) || "plugin".equals( type ) )
-                        {
-                            lastAvceToken = lastAvceToken.substring( 0, extPos );
-
-                            avceTokenList.addLast( lastAvceToken );
-                        }
-                        else
-                        {
-                            throw new DiscovererException( "Path type does not match the extension" );
-                        }
-                    }
-                    else
-                    {
-                        throw new DiscovererException( "Path filename does not have an extension" );
-                    }
-                }
-
-                // let's discover the version, and whatever's leftover will be either
-                // a classifier, or part of the artifactId, depending on position.
-                // Since version is at the end, we have to move in from the back.
-                Collections.reverse( avceTokenList );
-
-                // TODO: this is obscene - surely a better way?
-                String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" +
-                    "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" +
-                    "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" +
-                    "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" +
-                    "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" +
-                    "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" +
-                    "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
-
-                StringBuffer classifierBuffer = new StringBuffer();
-                StringBuffer versionBuffer = new StringBuffer();
-
-                boolean firstVersionTokenEncountered = false;
-                boolean firstToken = true;
-
-                int tokensIterated = 0;
-                for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
-                {
-                    String token = (String) it.next();
-
-                    boolean tokenIsVersionPart = token.matches( validVersionParts );
-
-                    StringBuffer bufferToUpdate;
-
-                    // NOTE: logic in code is reversed, since we're peeling off the back
-                    // Any token after the last versionPart will be in the classifier.
-                    // Any token UP TO first non-versionPart is part of the version.
-                    if ( !tokenIsVersionPart )
-                    {
-                        if ( firstVersionTokenEncountered )
-                        {
-                            //noinspection BreakStatement
-                            break;
-                        }
-                        else
-                        {
-                            bufferToUpdate = classifierBuffer;
-                        }
-                    }
-                    else
-                    {
-                        firstVersionTokenEncountered = true;
-
-                        bufferToUpdate = versionBuffer;
-                    }
-
-                    if ( firstToken )
-                    {
-                        firstToken = false;
-                    }
-                    else
-                    {
-                        bufferToUpdate.insert( 0, '-' );
-                    }
-
-                    bufferToUpdate.insert( 0, token );
-
-                    tokensIterated++;
-                }
-
-                // Now, restore the proper ordering so we can build the artifactId.
-                Collections.reverse( avceTokenList );
-
-                // if we didn't find a version, then punt. Use the last token
-                // as the version, and set the classifier empty.
-                if ( versionBuffer.length() < 1 )
-                {
-                    if ( avceTokenList.size() > 1 )
-                    {
-                        int lastIdx = avceTokenList.size() - 1;
-
-                        versionBuffer.append( avceTokenList.get( lastIdx ) );
-                        avceTokenList.remove( lastIdx );
-                    }
-
-                    classifierBuffer.setLength( 0 );
-                }
-                else
-                {
-                    // if everything is kosher, then pop off all the classifier and
-                    // version tokens, leaving the naked artifact id in the list.
-                    avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
-                }
-
-                StringBuffer artifactIdBuffer = new StringBuffer();
-
-                firstToken = true;
-                for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
-                {
-                    String token = (String) it.next();
-
-                    if ( firstToken )
-                    {
-                        firstToken = false;
-                    }
-                    else
-                    {
-                        artifactIdBuffer.append( '-' );
-                    }
-
-                    artifactIdBuffer.append( token );
-                }
-
-                String artifactId = artifactIdBuffer.toString();
-
-                if ( artifactId.length() > 0 )
-                {
-                    int lastVersionCharIdx = versionBuffer.length() - 1;
-                    if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
-                    {
-                        versionBuffer.setLength( lastVersionCharIdx );
-                    }
-
-                    String version = versionBuffer.toString();
-
-                    if ( version.length() > 0 )
-                    {
-                        if ( classifierBuffer.length() > 0 )
-                        {
-                            result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
-                                                                                   classifierBuffer.toString() );
-                        }
-                        else
-                        {
-                            result = artifactFactory.createArtifact( groupId, artifactId, version,
-                                                                     Artifact.SCOPE_RUNTIME, type );
-                        }
-                    }
-                    else
-                    {
-                        throw new DiscovererException( "Path filename version is empty" );
-                    }
-                }
-                else
-                {
-                    throw new DiscovererException( "Path filename artifactId is empty" );
-                }
-            }
-            else
-            {
-                throw new DiscovererException( "Path artifact type does not corresspond to an artifact type" );
-            }
-        }
-        else
-        {
-            throw new DiscovererException( "Path does not match a legacy repository path for an artifact" );
-        }
-
-        return result;
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java
deleted file mode 100644 (file)
index ff74a92..0000000
+++ /dev/null
@@ -1,57 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-import java.util.List;
-
-/**
- * Interface for discovering metadata files.
- */
-public interface MetadataDiscoverer
-    extends Discoverer
-{
-    String ROLE = MetadataDiscoverer.class.getName();
-
-    /**
-     * Search for metadata files in the repository.
-     *
-     * @param repository          The repository.
-     * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
-     * @param metadataFilter      filter to use on the discovered metadata before returning
-     * @return the list of artifacts found
-     * @throws DiscovererException if there is a problem during the discovery process
-     */
-    List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter metadataFilter )
-        throws DiscovererException;
-
-    /**
-     * Search for metadata files in the repository.
-     *
-     * @param repository          The repository.
-     * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
-     * @return the list of artifacts found
-     * @throws DiscovererException if there is a problem during the discovery process
-     */
-    List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
-        throws DiscovererException;
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/PathUtil.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/PathUtil.java
new file mode 100644 (file)
index 0000000..5a2c740
--- /dev/null
@@ -0,0 +1,56 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+/**
+ * PathUtil - simple utility methods for path manipulation. 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class PathUtil
+{
+    public static String getRelative( String basedir, File file )
+    {
+        return getRelative( basedir, file.getAbsolutePath() );
+    }
+
+    public static String getRelative( String basedir, String child )
+    {
+        if ( child.startsWith( basedir ) )
+        {
+            // simple solution.
+            return child.substring( basedir.length() + 1 );
+        }
+
+        String absoluteBasedir = new File( basedir ).getAbsolutePath();
+        if ( child.startsWith( absoluteBasedir ) )
+        {
+            // resolved basedir solution.
+            return child.substring( absoluteBasedir.length() + 1 );
+        }
+
+        // File is not within basedir.
+        throw new IllegalStateException( "Unable to obtain relative path of file " + child
+            + ", it is not within basedir " + basedir + "." );
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java
new file mode 100644 (file)
index 0000000..2384555
--- /dev/null
@@ -0,0 +1,204 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.SystemUtils;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.util.DirectoryWalkListener;
+import org.codehaus.plexus.util.SelectorUtils;
+
+import java.io.File;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * RepositoryScanner - this is an instance of a scan against a repository.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class RepositoryScanner
+    implements DirectoryWalkListener
+{
+    public static final String ROLE = RepositoryScanner.class.getName();
+
+    private List consumers;
+
+    private ArtifactRepository repository;
+
+    private Logger logger;
+
+    private boolean isCaseSensitive = true;
+
+    private DiscovererStatistics stats;
+
+    private boolean checkLastModified = true;
+
+    public RepositoryScanner( ArtifactRepository repository, List consumerList )
+    {
+        this.repository = repository;
+        this.consumers = consumerList;
+        stats = new DiscovererStatistics( repository );
+
+        Iterator it = this.consumers.iterator();
+        while ( it.hasNext() )
+        {
+            DiscovererConsumer consumer = (DiscovererConsumer) it.next();
+
+            if ( !consumer.init( this.repository ) )
+            {
+                throw new IllegalStateException( "Consumer [" + consumer.getName()
+                    + "] is reporting that it is incompatible with the [" + repository.getId() + "] repository." );
+            }
+        }
+
+        if ( SystemUtils.IS_OS_WINDOWS )
+        {
+            isCaseSensitive = false;
+        }
+    }
+
+    public DiscovererStatistics getStatistics()
+    {
+        return stats;
+    }
+
+    public void directoryWalkFinished()
+    {
+        getLogger().info( "Walk Finished." );
+        stats.timestampFinished = System.currentTimeMillis();
+        
+        if( isCheckLastModified() )
+        {
+            // Only save if dealing with 'last modified' concept.
+            
+            try
+            {
+                stats.save();
+            }
+            catch ( DiscovererException e )
+            {
+                getLogger().warn( "Unable to save Scan information.", e );
+            }
+        }
+    }
+
+    public void directoryWalkStarting( File basedir )
+    {
+        getLogger().info( "Walk Started." );
+        stats.reset();
+        stats.timestampStarted = System.currentTimeMillis();
+    }
+
+    public void directoryWalkStep( int percentage, File file )
+    {
+        getLogger().info( "Walk Step: " + percentage + ", " + file );
+
+        // Timestamp finished points to the last successful scan, not this current one.
+        if ( isCheckLastModified() && ( file.lastModified() <= stats.timestampFinished ) )
+        {
+            // Skip file as no change has occurred.
+            getLogger().debug( "Skipping, No Change: " + file.getAbsolutePath() );
+            stats.filesSkipped++;
+            return;
+        }
+
+        synchronized ( consumers )
+        {
+            stats.filesIncluded++;
+
+            String relativePath = PathUtil.getRelative( repository.getBasedir(), file );
+
+            Iterator itConsumers = this.consumers.iterator();
+            while ( itConsumers.hasNext() )
+            {
+                DiscovererConsumer consumer = (DiscovererConsumer) itConsumers.next();
+
+                if ( isConsumerOfFile( consumer, relativePath ) )
+                {
+                    try
+                    {
+                        getLogger().info( "Sending to consumer: " + consumer.getName() );
+                        stats.filesConsumed++;
+                        consumer.processFile( file );
+                    }
+                    catch ( Exception e )
+                    {
+                        /* Intentionally catch all exceptions.
+                         * So that the discoverer processing can continue.
+                         */
+                        getLogger()
+                            .error( "Unable to process file [" + file.getAbsolutePath() + "]: " + e.getMessage(), e );
+                    }
+                }
+                else
+                {
+                    getLogger().info( "Skipping consumer " + consumer.getName() + " for file " + relativePath );
+                }
+            }
+        }
+    }
+
+    private boolean isConsumerOfFile( DiscovererConsumer consumer, String relativePath )
+    {
+        Iterator it = consumer.getIncludePatterns().iterator();
+        // String name = file.getAbsolutePath();
+        while ( it.hasNext() )
+        {
+            String pattern = (String) it.next();
+            if ( SelectorUtils.matchPath( pattern, relativePath, isCaseSensitive ) )
+            {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    public boolean isCheckLastModified()
+    {
+        return checkLastModified;
+    }
+
+    public void setCheckLastModified( boolean checkLastModified )
+    {
+        this.checkLastModified = checkLastModified;
+    }
+
+    /**
+     * Debug method from DirectoryWalker.
+     */
+    public void debug( String message )
+    {
+        getLogger().debug( "Repository Scanner: " + message );
+    }
+
+    public Logger getLogger()
+    {
+        return logger;
+    }
+
+    public void setLogger( Logger logger )
+    {
+        this.logger = logger;
+    }
+
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/AbstractLayoutArtifactBuilder.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/AbstractLayoutArtifactBuilder.java
new file mode 100644 (file)
index 0000000..f640733
--- /dev/null
@@ -0,0 +1,55 @@
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+/**
+ * AbstractLayoutArtifactBuilder 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractLayoutArtifactBuilder
+    implements LayoutArtifactBuilder
+{
+    /**
+     * @plexus.requirement
+     */
+    protected ArtifactFactory artifactFactory;
+
+    /**
+     * Constructor used by plexus
+     */
+    public AbstractLayoutArtifactBuilder()
+    {
+
+    }
+
+    /**
+     * Constructor used by manual process.
+     * 
+     * @param artifactFactory the artifact factory to use.
+     */
+    public AbstractLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+    {
+        this.artifactFactory = artifactFactory;
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/BuilderException.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/BuilderException.java
new file mode 100644 (file)
index 0000000..d619fb6
--- /dev/null
@@ -0,0 +1,43 @@
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+
+/**
+ * BuilderException - used to indicate a problem during the building of an object from file. 
+ * Extends {@link DiscovererException} so that callers handling generic discoverer
+ * failures also catch builder failures.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class BuilderException
+    extends DiscovererException
+{
+
+    /**
+     * @param message describes what could not be built and why.
+     * @param cause the underlying error.
+     */
+    public BuilderException( String message, Throwable cause )
+    {
+        super( message, cause );
+    }
+
+    /**
+     * @param message describes what could not be built and why.
+     */
+    public BuilderException( String message )
+    {
+        super( message );
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/DefaultLayoutArtifactBuilder.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/DefaultLayoutArtifactBuilder.java
new file mode 100644 (file)
index 0000000..0d6123f
--- /dev/null
@@ -0,0 +1,214 @@
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * DefaultLayoutArtifactBuilder - artifact builder for default layout repositories.
+ * Interprets a relative repository path of the maven 2.x "default" layout form
+ * {@code group/id/dirs/artifactId/version/artifactId-version[-classifier].type}
+ * and builds the corresponding {@link Artifact}.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.discoverer.builders.LayoutArtifactBuilder"
+ *     role-hint="default"
+ */
+public class DefaultLayoutArtifactBuilder
+    extends AbstractLayoutArtifactBuilder
+    implements LayoutArtifactBuilder
+{
+    /** Constructor used by plexus; {@link #artifactFactory} is injected. */
+    public DefaultLayoutArtifactBuilder()
+    {
+        super();
+    }
+
+    /** Constructor for manual (non-container) use. */
+    public DefaultLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+    {
+        super( artifactFactory );
+    }
+
+    /**
+     * Build an artifact from a default-layout repository path.
+     *
+     * @param pathToArtifact path relative to the repository root, using '/' or '\' separators.
+     * @return the built artifact; never null (all failure paths throw).
+     * @throws BuilderException if the path cannot be interpreted as an artifact
+     *             (too short, no extension, filename/version mismatch, ...).
+     * @throws DiscovererException declared by the {@link LayoutArtifactBuilder} contract;
+     *             only {@link BuilderException} is thrown directly here.
+     */
+    public Artifact build( String pathToArtifact )
+        throws BuilderException, DiscovererException
+    {
+        // Split the path into components; accepts both unix and windows separators.
+        List pathParts = new ArrayList();
+        StringTokenizer st = new StringTokenizer( pathToArtifact, "/\\" );
+        while ( st.hasMoreTokens() )
+        {
+            pathParts.add( st.nextToken() );
+        }
+
+        // Reverse so the filename is first and groupId directories are last,
+        // letting us pop the known trailing components off the front.
+        Collections.reverse( pathParts );
+
+        Artifact artifact;
+        if ( pathParts.size() >= 4 )
+        {
+            // maven 2.x path
+
+            // the actual artifact filename.
+            String filename = (String) pathParts.remove( 0 );
+
+            // the next one is the version.
+            String version = (String) pathParts.remove( 0 );
+
+            // the next one is the artifactId.
+            String artifactId = (String) pathParts.remove( 0 );
+
+            // the remaining are the groupId.
+            Collections.reverse( pathParts );
+            String groupId = StringUtils.join( pathParts.iterator(), "." );
+
+            // Strip "artifactId-" from the filename; what remains is
+            // version[-classifier].extension.
+            String remainingFilename = filename;
+            if ( remainingFilename.startsWith( artifactId + "-" ) )
+            {
+                remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
+
+                String classifier = null;
+
+                // TODO: use artifact handler, share with legacy discoverer
+                // Map well-known multi-part extensions / classifier suffixes to types;
+                // otherwise fall back to "everything after the last dot" as the type.
+                String type;
+                if ( remainingFilename.endsWith( ".tar.gz" ) )
+                {
+                    type = "distribution-tgz";
+                    remainingFilename = remainingFilename
+                        .substring( 0, remainingFilename.length() - ".tar.gz".length() );
+                }
+                else if ( remainingFilename.endsWith( ".zip" ) )
+                {
+                    type = "distribution-zip";
+                    remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
+                }
+                else if ( remainingFilename.endsWith( "-test-sources.jar" ) )
+                {
+                    type = "java-source";
+                    classifier = "test-sources";
+                    remainingFilename = remainingFilename.substring( 0, remainingFilename.length()
+                        - "-test-sources.jar".length() );
+                }
+                else if ( remainingFilename.endsWith( "-sources.jar" ) )
+                {
+                    type = "java-source";
+                    classifier = "sources";
+                    remainingFilename = remainingFilename.substring( 0, remainingFilename.length()
+                        - "-sources.jar".length() );
+                }
+                else
+                {
+                    int index = remainingFilename.lastIndexOf( "." );
+                    if ( index >= 0 )
+                    {
+                        type = remainingFilename.substring( index + 1 );
+                        remainingFilename = remainingFilename.substring( 0, index );
+                    }
+                    else
+                    {
+                        throw new BuilderException( "Path filename does not have an extension." );
+                    }
+                }
+
+                // Build a first-cut artifact from the directory-derived version so we
+                // can ask it whether the version is a snapshot.
+                Artifact result;
+                if ( classifier == null )
+                {
+                    result = artifactFactory
+                        .createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
+                }
+                else
+                {
+                    result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+                                                                           classifier );
+                }
+
+                if ( result.isSnapshot() )
+                {
+                    // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
+                    // NOTE(review): the '+ 8' offsets the search past the timestamped
+                    // portion (the timestamp is longer than the literal "SNAPSHOT"),
+                    // so the next '-' found belongs to a classifier — TODO confirm
+                    // for multi-digit build numbers.
+                    int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
+                    if ( classifierIndex >= 0 )
+                    {
+                        classifier = remainingFilename.substring( classifierIndex + 1 );
+                        remainingFilename = remainingFilename.substring( 0, classifierIndex );
+                        result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
+                                                                               type, classifier );
+                    }
+                    else
+                    {
+                        result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
+                                                                 Artifact.SCOPE_RUNTIME, type );
+                    }
+
+                    // poor encapsulation requires we do this to populate base version
+                    if ( !result.isSnapshot() )
+                    {
+                        throw new BuilderException( "Failed to create a snapshot artifact: " + result );
+                    }
+                    else if ( !result.getBaseVersion().equals( version ) )
+                    {
+                        throw new BuilderException(
+                                                    "Built snapshot artifact base version does not match path version: "
+                                                        + result.getBaseVersion() + "; should have been version: "
+                                                        + version );
+                    }
+                    else
+                    {
+                        artifact = result;
+                    }
+                }
+                else if ( !remainingFilename.startsWith( version ) )
+                {
+                    throw new BuilderException( "Built artifact version does not match path version" );
+                }
+                else if ( !remainingFilename.equals( version ) )
+                {
+                    // Non-snapshot with extra text after the version: treat the
+                    // "-suffix" as a classifier; anything else is a malformed path.
+                    if ( remainingFilename.charAt( version.length() ) == '-' )
+                    {
+                        classifier = remainingFilename.substring( version.length() + 1 );
+                        artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+                                                                                 classifier );
+                    }
+                    else
+                    {
+                        throw new BuilderException( "Path version does not corresspond to an artifact version" );
+                    }
+                }
+                else
+                {
+                    artifact = result;
+                }
+            }
+            else
+            {
+                throw new BuilderException( "Path filename does not correspond to an artifact." );
+            }
+        }
+        else
+        {
+            // Need at least groupId/artifactId/version/filename.
+            throw new BuilderException( "Path is too short to build an artifact from." );
+        }
+
+        return artifact;
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/LayoutArtifactBuilder.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/LayoutArtifactBuilder.java
new file mode 100644 (file)
index 0000000..43b82c3
--- /dev/null
@@ -0,0 +1,37 @@
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+/**
+ * LayoutArtifactBuilder - builds an {@link Artifact} from a repository-relative
+ * file path, for a specific repository layout.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @todo this concept should really exist inside of the {@link ArtifactRepositoryLayout}
+ */
+public interface LayoutArtifactBuilder
+{
+    /**
+     * Build an artifact from a path relative to the repository root.
+     *
+     * @param pathToArtifact the relative path to interpret.
+     * @return the built artifact.
+     * @throws BuilderException if the path cannot be interpreted as an artifact.
+     * @throws DiscovererException on other discovery failures.
+     */
+    public Artifact build( String pathToArtifact ) throws BuilderException, DiscovererException;
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/LegacyLayoutArtifactBuilder.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/builders/LegacyLayoutArtifactBuilder.java
new file mode 100644 (file)
index 0000000..d46a0c5
--- /dev/null
@@ -0,0 +1,299 @@
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.StringTokenizer;
+
+/**
+ * LegacyLayoutArtifactBuilder - builds an {@link Artifact} from a maven 1.x
+ * "legacy" layout path of the form {@code groupId/types/artifactId-version[-classifier].ext}.
+ * The artifactId, version and classifier are all packed into one filename and are
+ * separated heuristically (see the version-part regex below).
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ *  @plexus.component role="org.apache.maven.archiva.discoverer.builders.LayoutArtifactBuilder"
+ *     role-hint="legacy"
+ */
+public class LegacyLayoutArtifactBuilder
+    extends AbstractLayoutArtifactBuilder
+    implements LayoutArtifactBuilder
+{
+    /** Constructor used by plexus; {@link #artifactFactory} is injected. */
+    public LegacyLayoutArtifactBuilder()
+    {
+        super();
+    }
+
+    /** Constructor for manual (non-container) use. */
+    public LegacyLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+    {
+        super( artifactFactory );
+    }
+
+    /**
+     * Build an artifact from a legacy-layout repository path.
+     *
+     * @param pathToArtifact path relative to the repository root, using '/' or '\' separators;
+     *            must have exactly three components (groupId, type directory, filename).
+     * @return the built artifact; never null (all failure paths throw).
+     * @throws BuilderException if the path cannot be interpreted as an artifact.
+     * @throws DiscovererException declared by the {@link LayoutArtifactBuilder} contract;
+     *             only {@link BuilderException} is thrown directly here.
+     */
+    public Artifact build( String pathToArtifact )
+        throws BuilderException, DiscovererException
+    {
+        StringTokenizer tokens = new StringTokenizer( pathToArtifact, "/\\" );
+
+        Artifact result;
+
+        int numberOfTokens = tokens.countTokens();
+
+        // Legacy layout is always exactly: groupId / <type>s / filename
+        if ( numberOfTokens == 3 )
+        {
+            String groupId = tokens.nextToken();
+
+            String type = tokens.nextToken();
+
+            // The directory is the plural of the type ("jars" -> "jar").
+            if ( type.endsWith( "s" ) )
+            {
+                type = type.substring( 0, type.length() - 1 );
+
+                // contains artifactId, version, classifier, and extension.
+                String avceGlob = tokens.nextToken();
+
+                //noinspection CollectionDeclaredAsConcreteClass
+                LinkedList avceTokenList = new LinkedList();
+
+                // Split the filename on '-'; the pieces are re-assembled below.
+                StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
+                while ( avceTokenizer.hasMoreTokens() )
+                {
+                    avceTokenList.addLast( avceTokenizer.nextToken() );
+                }
+
+                // The last token carries the file extension; strip it and refine
+                // the type for known special cases before putting the token back.
+                String lastAvceToken = (String) avceTokenList.removeLast();
+
+                // TODO: share with other discoverer, use artifact handlers instead
+                if ( lastAvceToken.endsWith( ".tar.gz" ) )
+                {
+                    type = "distribution-tgz";
+
+                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
+
+                    avceTokenList.addLast( lastAvceToken );
+                }
+                else if ( lastAvceToken.endsWith( "sources.jar" ) )
+                {
+                    type = "java-source";
+
+                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+                    avceTokenList.addLast( lastAvceToken );
+                }
+                else if ( lastAvceToken.endsWith( "javadoc.jar" ) )
+                {
+                    // NOTE(review): "javadoc.jar" looks inconsistent with the
+                    // "java-source" style used above — confirm intended type id.
+                    type = "javadoc.jar";
+
+                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+                    avceTokenList.addLast( lastAvceToken );
+                }
+                else if ( lastAvceToken.endsWith( ".zip" ) )
+                {
+                    type = "distribution-zip";
+
+                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
+
+                    avceTokenList.addLast( lastAvceToken );
+                }
+                else
+                {
+                    int extPos = lastAvceToken.lastIndexOf( '.' );
+
+                    if ( extPos > 0 )
+                    {
+                        // Generic case: the extension must match the directory-derived
+                        // type (or the directory is "plugins", whose files are jars).
+                        String ext = lastAvceToken.substring( extPos + 1 );
+                        if ( type.equals( ext ) || "plugin".equals( type ) )
+                        {
+                            lastAvceToken = lastAvceToken.substring( 0, extPos );
+
+                            avceTokenList.addLast( lastAvceToken );
+                        }
+                        else
+                        {
+                            throw new BuilderException( "Path type does not match the extension" );
+                        }
+                    }
+                    else
+                    {
+                        throw new BuilderException( "Path filename does not have an extension" );
+                    }
+                }
+
+                // let's discover the version, and whatever's leftover will be either
+                // a classifier, or part of the artifactId, depending on position.
+                // Since version is at the end, we have to move in from the back.
+                Collections.reverse( avceTokenList );
+
+                // TODO: this is obscene - surely a better way?
+                // Alternation of case-insensitive patterns recognizing tokens that can
+                // belong to a version (numbers, SNAPSHOT, alpha/beta/rc/dev/etc.).
+                String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|"
+                    + "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|"
+                    + "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|"
+                    + "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|"
+                    + "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|"
+                    + "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|"
+                    + "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
+
+                StringBuffer classifierBuffer = new StringBuffer();
+                StringBuffer versionBuffer = new StringBuffer();
+
+                boolean firstVersionTokenEncountered = false;
+                boolean firstToken = true;
+
+                // Walk the tokens from the back of the filename: trailing
+                // non-version tokens form the classifier, then version tokens form
+                // the version; stop at the first non-version token after that
+                // (the remainder is the artifactId).
+                int tokensIterated = 0;
+                for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+                {
+                    String token = (String) it.next();
+
+                    boolean tokenIsVersionPart = token.matches( validVersionParts );
+
+                    StringBuffer bufferToUpdate;
+
+                    // NOTE: logic in code is reversed, since we're peeling off the back
+                    // Any token after the last versionPart will be in the classifier.
+                    // Any token UP TO first non-versionPart is part of the version.
+                    if ( !tokenIsVersionPart )
+                    {
+                        if ( firstVersionTokenEncountered )
+                        {
+                            //noinspection BreakStatement
+                            break;
+                        }
+                        else
+                        {
+                            bufferToUpdate = classifierBuffer;
+                        }
+                    }
+                    else
+                    {
+                        firstVersionTokenEncountered = true;
+
+                        bufferToUpdate = versionBuffer;
+                    }
+
+                    if ( firstToken )
+                    {
+                        firstToken = false;
+                    }
+                    else
+                    {
+                        bufferToUpdate.insert( 0, '-' );
+                    }
+
+                    // insert(0, ...) restores original left-to-right order while
+                    // we iterate the reversed list.
+                    bufferToUpdate.insert( 0, token );
+
+                    tokensIterated++;
+                }
+
+                // Now, restore the proper ordering so we can build the artifactId.
+                Collections.reverse( avceTokenList );
+
+                // if we didn't find a version, then punt. Use the last token
+                // as the version, and set the classifier empty.
+                if ( versionBuffer.length() < 1 )
+                {
+                    if ( avceTokenList.size() > 1 )
+                    {
+                        int lastIdx = avceTokenList.size() - 1;
+
+                        versionBuffer.append( avceTokenList.get( lastIdx ) );
+                        avceTokenList.remove( lastIdx );
+                    }
+
+                    classifierBuffer.setLength( 0 );
+                }
+                else
+                {
+                    // if everything is kosher, then pop off all the classifier and
+                    // version tokens, leaving the naked artifact id in the list.
+                    avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
+                }
+
+                // Re-join the remaining tokens with '-' to rebuild the artifactId.
+                StringBuffer artifactIdBuffer = new StringBuffer();
+
+                firstToken = true;
+                for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+                {
+                    String token = (String) it.next();
+
+                    if ( firstToken )
+                    {
+                        firstToken = false;
+                    }
+                    else
+                    {
+                        artifactIdBuffer.append( '-' );
+                    }
+
+                    artifactIdBuffer.append( token );
+                }
+
+                String artifactId = artifactIdBuffer.toString();
+
+                if ( artifactId.length() > 0 )
+                {
+                    // Trim a trailing '-' that the reversed-insert loop can leave behind.
+                    int lastVersionCharIdx = versionBuffer.length() - 1;
+                    if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
+                    {
+                        versionBuffer.setLength( lastVersionCharIdx );
+                    }
+
+                    String version = versionBuffer.toString();
+
+                    if ( version.length() > 0 )
+                    {
+                        if ( classifierBuffer.length() > 0 )
+                        {
+                            result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+                                                                                   classifierBuffer.toString() );
+                        }
+                        else
+                        {
+                            result = artifactFactory.createArtifact( groupId, artifactId, version,
+                                                                     Artifact.SCOPE_RUNTIME, type );
+                        }
+                    }
+                    else
+                    {
+                        throw new BuilderException( "Path filename version is empty" );
+                    }
+                }
+                else
+                {
+                    throw new BuilderException( "Path filename artifactId is empty" );
+                }
+            }
+            else
+            {
+                // NOTE(review): message typo "corresspond" — left as-is (runtime string).
+                throw new BuilderException( "Path artifact type does not corresspond to an artifact type" );
+            }
+        }
+        else
+        {
+            throw new BuilderException( "Path does not match a legacy repository path for an artifact" );
+        }
+
+        return result;
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/AbstractDiscovererConsumer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/AbstractDiscovererConsumer.java
new file mode 100644 (file)
index 0000000..c94dc0f
--- /dev/null
@@ -0,0 +1,62 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * AbstractDiscovererConsumer - common base for {@link DiscovererConsumer}
+ * implementations; stores the repository being walked and provides
+ * default (empty) exclude patterns.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractDiscovererConsumer
+    extends AbstractLogEnabled
+    implements DiscovererConsumer
+{
+    /**
+     * Factory for creating artifact instances; injected by the container.
+     *
+     * @plexus.requirement
+     */
+    protected ArtifactFactory artifactFactory;
+
+    // The repository currently being scanned; set in init().
+    protected ArtifactRepository repository;
+
+    /**
+     * By default a consumer excludes nothing; subclasses may override.
+     */
+    public List getExcludePatterns()
+    {
+        return Collections.EMPTY_LIST;
+    }
+
+    /**
+     * Bind this consumer to a repository before a scan.
+     *
+     * @param repository the repository about to be walked.
+     * @return true if this consumer is enabled for the repository.
+     */
+    public boolean init( ArtifactRepository repository )
+    {
+        this.repository = repository;
+        return isEnabled();
+    }
+    
+    /**
+     * Whether this consumer should run for the current repository;
+     * subclasses override to restrict by layout etc.
+     */
+    protected boolean isEnabled()
+    {
+        return true;
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumer.java
new file mode 100644 (file)
index 0000000..6e6d8d9
--- /dev/null
@@ -0,0 +1,139 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.archiva.discoverer.builders.BuilderException;
+import org.apache.maven.archiva.discoverer.builders.DefaultLayoutArtifactBuilder;
+import org.apache.maven.archiva.discoverer.builders.LayoutArtifactBuilder;
+import org.apache.maven.archiva.discoverer.builders.LegacyLayoutArtifactBuilder;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * GenericArtifactConsumer - base consumer that turns each discovered file into
+ * an {@link Artifact} using the layout-appropriate {@link LayoutArtifactBuilder},
+ * delegating success/failure to subclass hooks.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericArtifactConsumer
+    extends AbstractDiscovererConsumer
+    implements DiscovererConsumer
+{
+    // Layout id ("default" / "legacy") -> LayoutArtifactBuilder; populated in init().
+    private Map artifactBuilders = new HashMap();
+
+    // File patterns this consumer wants the walker to publish to it.
+    private static final List includePatterns;
+
+    static
+    {
+        includePatterns = new ArrayList();
+        includePatterns.add( "**/*.pom" );
+        includePatterns.add( "**/*.jar" );
+        includePatterns.add( "**/*.war" );
+        includePatterns.add( "**/*.ear" );
+        includePatterns.add( "**/*.sar" );
+        includePatterns.add( "**/*.zip" );
+        includePatterns.add( "**/*.gz" );
+        includePatterns.add( "**/*.bz2" );
+    }
+
+    // Which builder to use for this repository; switched to "legacy" in init().
+    private String layoutId = "default";
+
+    public GenericArtifactConsumer()
+    {
+    }
+
+    /**
+     * Set up the layout builders for the given repository and select the one
+     * matching the repository's layout.
+     *
+     * @param repository the repository about to be walked.
+     * @return true if this consumer is enabled for the repository's layout.
+     */
+    public boolean init( ArtifactRepository repository )
+    {
+        this.artifactBuilders.clear();
+        this.artifactBuilders.put( "default", new DefaultLayoutArtifactBuilder( artifactFactory ) );
+        this.artifactBuilders.put( "legacy", new LegacyLayoutArtifactBuilder( artifactFactory ) );
+
+        if ( repository.getLayout() instanceof LegacyRepositoryLayout )
+        {
+            this.layoutId = "legacy";
+        }
+
+        return super.init( repository );
+    }
+
+    /** Hook: called for each file successfully built into an artifact. */
+    public abstract void processArtifact( Artifact artifact, File file );
+
+    /** Hook: called when a file could not be built into an artifact. */
+    public abstract void processArtifactBuildFailure( File path, String message );
+
+    public List getIncludePatterns()
+    {
+        return includePatterns;
+    }
+
+    public String getName()
+    {
+        return "Artifact Consumer";
+    }
+
+    /**
+     * Only enabled for the two layouts we have builders for.
+     */
+    public boolean isEnabled()
+    {
+        ArtifactRepositoryLayout layout = repository.getLayout();
+        return ( layout instanceof DefaultRepositoryLayout ) || ( layout instanceof LegacyRepositoryLayout );
+    }
+
+    /**
+     * Walker callback: build an artifact from the published file and dispatch
+     * to the appropriate subclass hook.  Only {@link BuilderException} is
+     * converted into a build-failure callback; other discoverer errors propagate.
+     */
+    public void processFile( File file )
+        throws DiscovererException
+    {
+        try
+        {
+            Artifact artifact = buildArtifact( repository.getBasedir(), file.getAbsolutePath() );
+
+            processArtifact( artifact, file );
+        }
+        catch ( BuilderException e )
+        {
+            processArtifactBuildFailure( file, e.getMessage() );
+        }
+    }
+
+    /**
+     * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
+     */
+    private Artifact buildArtifact( String repoBaseDir, String path )
+        throws BuilderException, DiscovererException
+    {
+        LayoutArtifactBuilder builder = (LayoutArtifactBuilder) artifactBuilders.get( layoutId );
+
+        String relativePath = PathUtil.getRelative( repoBaseDir, path );
+
+        Artifact artifact = builder.build( relativePath );
+        artifact.setRepository( repository );
+        // NOTE(review): 'path' arrives as an absolute path (file.getAbsolutePath()
+        // in processFile), yet is combined with the basedir here — looks like it
+        // should be 'relativePath'; confirm against File(File,String) semantics.
+        artifact.setFile( new File( repository.getBasedir(), path ) );
+
+        return artifact;
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumer.java
new file mode 100644 (file)
index 0000000..aca2573
--- /dev/null
@@ -0,0 +1,113 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * GenericModelConsumer - consumer for pom files.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericModelConsumer
+    extends AbstractDiscovererConsumer
+    implements DiscovererConsumer
+{
+    public abstract void processModel( Model model, File file );
+
+    private static final List includePatterns;
+
+    static
+    {
+        includePatterns = new ArrayList();
+        includePatterns.add( "**/*.pom" );
+    }
+
+    public GenericModelConsumer()
+    {
+
+    }
+
+    public List getIncludePatterns()
+    {
+        return includePatterns;
+    }
+
+    public String getName()
+    {
+        return "MavenProject Consumer";
+    }
+
+    public boolean isEnabled()
+    {
+        return true;
+    }
+
+    public void processFile( File file )
+        throws DiscovererException
+    {
+        String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+        Model model = buildModel( repository.getBasedir(), relpath );
+        processModel( model, file );
+    }
+
+    private Model buildModel( String basedir, String modelpath )
+        throws DiscovererException
+    {
+        Model model;
+        File f = new File( basedir, modelpath );
+        Reader reader = null;
+        try
+        {
+            reader = new FileReader( f );
+            MavenXpp3Reader modelReader = new MavenXpp3Reader();
+
+            model = modelReader.read( reader );
+        }
+        catch ( XmlPullParserException e )
+        {
+            throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
+        }
+        catch ( IOException e )
+        {
+            throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
+        }
+        finally
+        {
+            IOUtil.close( reader );
+        }
+
+        return model;
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumer.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumer.java
new file mode 100644 (file)
index 0000000..2d5dae7
--- /dev/null
@@ -0,0 +1,219 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * GenericRepositoryMetadataConsumer - Consume any maven-metadata.xml files as {@link RepositoryMetadata} objects. 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericRepositoryMetadataConsumer
+    extends AbstractDiscovererConsumer
+    implements DiscovererConsumer
+{
+    public abstract void processRepositoryMetadata( RepositoryMetadata metadata, File file );
+
+    private static final List includePatterns;
+
+    static
+    {
+        includePatterns = new ArrayList();
+        includePatterns.add( "**/maven-metadata.xml" );
+    }
+
+    public GenericRepositoryMetadataConsumer()
+    {
+
+    }
+
+    public List getIncludePatterns()
+    {
+        return includePatterns;
+    }
+
+    public String getName()
+    {
+        return "RepositoryMetadata Consumer";
+    }
+
+    public boolean isEnabled()
+    {
+        // the RepositoryMetadata objects only exist in 'default' layout repositories.
+        ArtifactRepositoryLayout layout = repository.getLayout();
+        return ( layout instanceof DefaultRepositoryLayout );
+    }
+
+    public void processFile( File file )
+        throws DiscovererException
+    {
+        String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+        RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), relpath );
+        processRepositoryMetadata( metadata, file );
+    }
+
+    private RepositoryMetadata buildMetadata( String repo, String metadataPath )
+        throws DiscovererException
+    {
+        Metadata m;
+        File f = new File( repo, metadataPath );
+        Reader reader = null;
+        try
+        {
+            reader = new FileReader( f );
+            MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
+
+            m = metadataReader.read( reader );
+        }
+        catch ( XmlPullParserException e )
+        {
+            throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
+        }
+        catch ( IOException e )
+        {
+            throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
+        }
+        finally
+        {
+            IOUtil.close( reader );
+        }
+
+        RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
+
+        if ( repositoryMetadata == null )
+        {
+            throw new DiscovererException( "Unable to build a repository metadata from path" );
+        }
+
+        return repositoryMetadata;
+    }
+
+    /**
+     * Builds a RepositoryMetadata object from a Metadata object and its path.
+     *
+     * @param m            Metadata
+     * @param metadataPath path
+     * @return RepositoryMetadata if the parameters represent one; null if not
+     * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc)
+     */
+    private RepositoryMetadata buildMetadata( Metadata m, String metadataPath )
+    {
+        String metaGroupId = m.getGroupId();
+        String metaArtifactId = m.getArtifactId();
+        String metaVersion = m.getVersion();
+
+        // check if the groupId, artifactId and version is in the
+        // metadataPath
+        // parse the path, in reverse order
+        List pathParts = new ArrayList();
+        StringTokenizer st = new StringTokenizer( metadataPath, "/\\" );
+        while ( st.hasMoreTokens() )
+        {
+            pathParts.add( st.nextToken() );
+        }
+
+        Collections.reverse( pathParts );
+        // remove the metadata file
+        pathParts.remove( 0 );
+        Iterator it = pathParts.iterator();
+        String tmpDir = (String) it.next();
+
+        Artifact artifact = null;
+        if ( StringUtils.isNotEmpty( metaVersion ) )
+        {
+            artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
+        }
+
+        // snapshotMetadata
+        RepositoryMetadata metadata = null;
+        if ( tmpDir != null && tmpDir.equals( metaVersion ) )
+        {
+            if ( artifact != null )
+            {
+                metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+            }
+        }
+        else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
+        {
+            // artifactMetadata
+            if ( artifact != null )
+            {
+                metadata = new ArtifactRepositoryMetadata( artifact );
+            }
+            else
+            {
+                artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
+                metadata = new ArtifactRepositoryMetadata( artifact );
+            }
+        }
+        else
+        {
+            String groupDir = "";
+            int ctr = 0;
+            for ( it = pathParts.iterator(); it.hasNext(); )
+            {
+                String path = (String) it.next();
+                if ( ctr == 0 )
+                {
+                    groupDir = path;
+                }
+                else
+                {
+                    groupDir = path + "." + groupDir;
+                }
+                ctr++;
+            }
+
+            // groupMetadata
+            if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
+            {
+                metadata = new GroupRepositoryMetadata( metaGroupId );
+            }
+        }
+
+        return metadata;
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java
deleted file mode 100644 (file)
index 0a85ba1..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-/**
- * Filter that accepts all.
- */
-public class AcceptAllArtifactFilter
-    implements ArtifactFilter
-{
-    public boolean include( Artifact artifact )
-    {
-        return true;
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java
deleted file mode 100644 (file)
index 80b49a8..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Filter that accepts all.
- */
-public class AcceptAllMetadataFilter
-    implements MetadataFilter
-{
-    public boolean include( RepositoryMetadata metadata, long timestamp )
-    {
-        return true;
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java
deleted file mode 100644 (file)
index fff5fb5..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Ability to filter repository metadata lists.
- *
- * @todo should be in maven-artifact
- */
-public interface MetadataFilter
-{
-    /**
-     * Whether to include this metadata in the filtered list.
-     *
-     * @param metadata  the metadata
-     * @param timestamp the time to compare against - it will be included if it doesn't exist or is outdated
-     * @return whether to include it
-     */
-    boolean include( RepositoryMetadata metadata, long timestamp );
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java b/archiva-MRM-239/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java
deleted file mode 100644 (file)
index a3c4577..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-/**
- * A filter to remove snapshot artifacts during discovery.
- */
-public class SnapshotArtifactFilter
-    implements ArtifactFilter
-{
-    public boolean include( Artifact artifact )
-    {
-        return !artifact.isSnapshot();
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java
deleted file mode 100644 (file)
index f1609e3..0000000
+++ /dev/null
@@ -1,90 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-
-/**
- * @author Edwin Punzalan
- */
-public abstract class AbstractArtifactDiscovererTest
-    extends PlexusTestCase
-{
-    protected ArtifactDiscoverer discoverer;
-
-    private ArtifactFactory factory;
-
-    protected ArtifactRepository repository;
-
-    protected abstract String getLayout();
-
-    protected abstract File getRepositoryFile();
-
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, getLayout() );
-
-        factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        repository = getRepository();
-    }
-
-    protected ArtifactRepository getRepository()
-        throws Exception
-    {
-        File basedir = getRepositoryFile();
-
-        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
-        ArtifactRepositoryLayout layout =
-            (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, getLayout() );
-
-        return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
-    }
-
-    protected Artifact createArtifact( String groupId, String artifactId, String version )
-    {
-        Artifact artifact = factory.createArtifact( groupId, artifactId, version, null, "jar" );
-        artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
-        artifact.setRepository( repository );
-        return artifact;
-    }
-
-    protected Artifact createArtifact( String groupId, String artifactId, String version, String type )
-    {
-        return factory.createArtifact( groupId, artifactId, version, null, type );
-    }
-
-    protected Artifact createArtifact( String groupId, String artifactId, String version, String type,
-                                       String classifier )
-    {
-        return factory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java
new file mode 100644 (file)
index 0000000..d0e296a
--- /dev/null
@@ -0,0 +1,78 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+
+/**
+ * @author Edwin Punzalan
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ */
+public abstract class AbstractDiscovererTestCase
+    extends PlexusTestCase
+{
+    protected ArtifactRepository getLegacyRepository()
+        throws Exception
+    {
+        File repoBaseDir = new File( getBasedir(), "src/test/legacy-repository" );
+        ArtifactRepository repository = createRepository( repoBaseDir, "legacy" );
+        resetRepositoryState( repository );
+        return repository;
+    }
+
+    protected ArtifactRepository getDefaultRepository()
+        throws Exception
+    {
+        File repoBaseDir = new File( getBasedir(), "src/test/repository" );
+        ArtifactRepository repository = createRepository( repoBaseDir, "default" );
+        resetRepositoryState( repository );
+        return repository;
+    }
+
+    private void resetRepositoryState( ArtifactRepository repository )
+    {
+        // Clean out any .stats file.
+        File repoBaseDir = new File( repository.getBasedir() );
+
+        File statFile = new File( repoBaseDir, DiscovererStatistics.STATS_FILENAME );
+        if ( statFile.exists() )
+        {
+            statFile.delete();
+        }
+
+        // TODO: Clean out any index.
+        // TODO: Clean out any report.
+    }
+
+    protected ArtifactRepository createRepository( File basedir, String layout )
+        throws Exception
+    {
+        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+        ArtifactRepositoryLayout repoLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, layout );
+
+        return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, repoLayout, null, null );
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java
deleted file mode 100644 (file)
index cd20645..0000000
+++ /dev/null
@@ -1,702 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the default artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:DefaultArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class DefaultArtifactDiscovererTest
-    extends AbstractArtifactDiscovererTest
-{
-    private static final List JAVAX_BLACKLIST = Collections.singletonList( "javax/**" );
-
-    protected String getLayout()
-    {
-        return "default";
-    }
-
-    protected File getRepositoryFile()
-    {
-        return getTestFile( "src/test/repository" );
-    }
-
-    public void testDefaultExcludes()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            boolean b = path.indexOf( "CVS" ) >= 0;
-            if ( b )
-            {
-                found = true;
-                assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
-            assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
-        }
-    }
-
-    public void testStandardExcludes()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "KEYS".equals( path ) )
-            {
-                found = true;
-                assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testBlacklistedExclude()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithBlacklist( JAVAX_BLACKLIST );
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "javax/sql/jdbc/2.0/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-    }
-
-    public void testKickoutWithShortPath()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path is too short to build an artifact from",
-                              dPath.getComment() );
-
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithWrongArtifactId()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar".equals(
-                path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path filename does not correspond to an artifact",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not wrong jar",
-                         "wrong-artifactId-1.0-20050611.112233-1.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithNoType()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1/invalid-1".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path filename does not have an extension",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1'", "invalid-1".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithWrongVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1.0/invalid-2.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Built artifact version does not match path version",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-2.0.jar'", "invalid-2.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithLongerVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1.0/invalid-1.0b.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path version does not corresspond to an artifact version",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1.0b.jar'", "invalid-1.0b.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithWrongSnapshotVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout",
-                              "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1.0.jar'", "invalid-1.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithSnapshotBaseVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar".equals(
-                path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout",
-                              "Built snapshot artifact base version does not match path version: invalid:invalid:jar:1.0-SNAPSHOT:runtime; should have been version: 1.0-20050611.123456-1",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1.0-20050611-123456-1.jar'",
-                         "invalid-1.0-20050611.123456-1.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
-    }
-
-    public void testArtifactWithClassifier()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
-    }
-
-    public void testJavaSourcesInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains(
-            createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
-    }
-
-    public void testTestSourcesInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains(
-            createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources" ) ) );
-    }
-
-    public void testDistributionInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check zip included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
-        assertTrue( "Check tar.gz included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
-    }
-
-    public void testSnapshotInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-        assertTrue( "Check snapshot included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ) ) );
-    }
-
-    public void testSnapshotInclusionWithClassifier()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check snapshot included", artifacts.contains(
-            createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ) ) );
-    }
-
-    public void testSnapshotExclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-        assertFalse( "Check snapshot included",
-                     artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) );
-    }
-
-    public void testFileSet()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-            assertNotNull( "Check file is set", artifact.getFile() );
-        }
-    }
-
-    public void testRepositorySet()
-        throws MalformedURLException, DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        String url = repository.getUrl();
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-            assertNotNull( "Check repository set", artifact.getRepository() );
-            assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
-        }
-    }
-
-    public void testStandalonePoms()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-
-        // cull down to actual artifacts (only standalone poms will have type = pom)
-        Map keyedArtifacts = new HashMap();
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            String key = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
-            if ( !"pom".equals( a.getType() ) || !keyedArtifacts.containsKey( key ) )
-            {
-                keyedArtifacts.put( key, a );
-            }
-        }
-
-        List models = new ArrayList();
-
-        for ( Iterator i = keyedArtifacts.values().iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-
-            if ( "pom".equals( a.getType() ) )
-            {
-                models.add( a );
-            }
-        }
-
-        assertEquals( 4, models.size() );
-
-        // Define order we expect
-        Collections.sort( models );
-
-        Iterator itr = models.iterator();
-        Artifact model = (Artifact) itr.next();
-        assertEquals( "org.apache.maven", model.getGroupId() );
-        assertEquals( "B", model.getArtifactId() );
-        assertEquals( "1.0", model.getVersion() );
-        model = (Artifact) itr.next();
-        assertEquals( "org.apache.maven", model.getGroupId() );
-        assertEquals( "B", model.getArtifactId() );
-        assertEquals( "2.0", model.getVersion() );
-        model = (Artifact) itr.next();
-        assertEquals( "org.apache.maven", model.getGroupId() );
-        assertEquals( "discovery", model.getArtifactId() );
-        assertEquals( "1.0", model.getVersion() );
-        model = (Artifact) itr.next();
-        assertEquals( "org.apache.testgroup", model.getGroupId() );
-        assertEquals( "discovery", model.getArtifactId() );
-        assertEquals( "1.0", model.getVersion() );
-    }
-
-    public void testShortPath()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid-1.0.jar" );
-
-            fail( "Artifact should be null for short paths" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testWrongArtifactId()
-        throws ComponentLookupException
-    {
-
-        try
-        {
-            discoverer.buildArtifact( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
-
-            fail( "Artifact should be null for wrong ArtifactId" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testNoType()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
-            fail( "Artifact should be null for no type" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testWrongVersion()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1.0/invalid-2.0.jar" );
-
-            fail( "Artifact should be null for wrong version" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testLongVersion()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1.0/invalid-1.0b.jar" );
-
-            fail( "Artifact should be null for long version" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testWrongSnapshotVersion()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
-
-            fail( "Artifact should be null for wrong snapshot version" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testSnapshotBaseVersion()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
-
-            fail( "Artifact should be null for snapshot base version" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testPathWithClassifier()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ), artifact );
-    }
-
-    public void testWithJavaSourceInclusion()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/testing/1.0/testing-1.0-sources.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ), artifact );
-    }
-
-    public void testDistributionArtifacts()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/testing/1.0/testing-1.0.tar.gz";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ), artifact );
-
-        testPath = "org/apache/maven/testing/1.0/testing-1.0.zip";
-
-        artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ), artifact );
-    }
-
-    public void testSnapshot()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ), artifact );
-
-        testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar";
-
-        artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ), artifact );
-    }
-
-    public void testNormal()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "javax/sql/jdbc/2.0/jdbc-2.0.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
-    }
-
-    public void testSnapshotWithClassifier()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ),
-                      artifact );
-    }
-
-    private List discoverArtifactsWithSnapshots()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() );
-    }
-
-    private List discoverArtifactsWithBlacklist( List list )
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, list, new SnapshotArtifactFilter() );
-    }
-
-    private List discoverArtifacts()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() );
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java
deleted file mode 100644 (file)
index 50ac62b..0000000
+++ /dev/null
@@ -1,199 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * This class tests the DefaultMetadataDiscoverer class.
- */
-public class DefaultMetadataDiscovererTest
-    extends PlexusTestCase
-{
-    private MetadataDiscoverer discoverer;
-
-    private static final String TEST_OPERATION = "test";
-
-    private ArtifactRepository repository;
-
-    private ArtifactFactory factory;
-
-    /**
-     *
-     */
-    public void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        discoverer = (MetadataDiscoverer) lookup( MetadataDiscoverer.ROLE, "default" );
-
-        factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        repository = getRepository();
-
-        removeTimestampMetadata();
-    }
-
-    protected ArtifactRepository getRepository()
-        throws Exception
-    {
-        File basedir = getTestFile( "src/test/repository" );
-
-        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
-        ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
-        return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
-    }
-
-    /**
-     *
-     */
-    public void tearDown()
-        throws Exception
-    {
-        super.tearDown();
-        discoverer = null;
-    }
-
-    /**
-     * Test if metadata file in wrong directory was added to the kickedOutPaths.
-     */
-    public void testKickoutWrongDirectory()
-        throws DiscovererException
-    {
-        discoverer.discoverMetadata( repository, null );
-        Iterator iter = discoverer.getKickedOutPathsIterator();
-        boolean found = false;
-        while ( iter.hasNext() && !found )
-        {
-            DiscovererPath dPath = (DiscovererPath) iter.next();
-            String dir = dPath.getPath();
-
-            String normalizedDir = dir.replace( '\\', '/' );
-            if ( "javax/maven-metadata.xml".equals( normalizedDir ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Unable to build a repository metadata from path",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( found );
-    }
-
-    /**
-     * Test if blank metadata file was added to the kickedOutPaths.
-     */
-    public void testKickoutBlankMetadata()
-        throws DiscovererException
-    {
-        discoverer.discoverMetadata( repository, null );
-        Iterator iter = discoverer.getKickedOutPathsIterator();
-        boolean found = false;
-        while ( iter.hasNext() && !found )
-        {
-            DiscovererPath dPath = (DiscovererPath) iter.next();
-            String dir = dPath.getPath();
-
-            String normalizedDir = dir.replace( '\\', '/' );
-            if ( "org/apache/maven/some-ejb/1.0/maven-metadata.xml".equals( normalizedDir ) )
-            {
-                found = true;
-                assertTrue( "Check reason for kickout", dPath.getComment().matches(
-                    "Error reading metadata file '(.*)': input contained no data" ) );
-            }
-        }
-        assertTrue( found );
-    }
-
-    private void removeTimestampMetadata()
-        throws IOException
-    {
-        // remove the metadata that tracks time
-        File file = new File( repository.getBasedir(), "maven-metadata.xml" );
-        System.gc(); // for Windows
-        file.delete();
-        assertFalse( file.exists() );
-    }
-
-    public void testDiscoverMetadata()
-        throws DiscovererException
-    {
-        List metadataPaths = discoverer.discoverMetadata( repository, null );
-        assertNotNull( "Check metadata not null", metadataPaths );
-
-        RepositoryMetadata metadata =
-            new ArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery" ) );
-        assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
-        metadata =
-            new SnapshotArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery", "1.0" ) );
-        assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
-        metadata = new GroupRepositoryMetadata( "org.apache.maven" );
-        assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-    }
-
-    protected Artifact createArtifact( String groupId, String artifactId )
-    {
-        return createArtifact( groupId, artifactId, "1.0" );
-    }
-
-    private Artifact createArtifact( String groupId, String artifactId, String version )
-    {
-        return factory.createArtifact( groupId, artifactId, version, null, "jar" );
-    }
-
-    private boolean containsMetadata( List metadataPaths, RepositoryMetadata metadata )
-    {
-        for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
-        {
-            RepositoryMetadata m = (RepositoryMetadata) i.next();
-
-            if ( m.getGroupId().equals( metadata.getGroupId() ) )
-            {
-                if ( m.getArtifactId() == null && metadata.getArtifactId() == null )
-                {
-                    return true;
-                }
-                else if ( m.getArtifactId() != null && m.getArtifactId().equals( metadata.getArtifactId() ) )
-                {
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java
deleted file mode 100644 (file)
index d0b2454..0000000
+++ /dev/null
@@ -1,537 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Test the legacy artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:LegacyArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class LegacyArtifactDiscovererTest
-    extends AbstractArtifactDiscovererTest
-{
-    private static final List JAVAX_SQL_BLACKLIST = Collections.singletonList( "javax.sql/**" );
-
-    protected String getLayout()
-    {
-        return "legacy";
-    }
-
-    protected File getRepositoryFile()
-    {
-        return getTestFile( "src/test/legacy-repository" );
-    }
-
-    public void testDefaultExcludes()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( path.indexOf( "CVS" ) >= 0 )
-            {
-                found = true;
-                assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
-            assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
-        }
-    }
-
-    public void testStandardExcludes()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "KEYS".equals( path ) )
-            {
-                found = true;
-                assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testBlacklistedExclude()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithBlacklist();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "javax.sql/jars/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-    }
-
-    public void testKickoutWithShortPath()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout",
-                              "Path does not match a legacy repository path for an artifact", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithLongPath()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/jars/1.0/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout",
-                              "Path does not match a legacy repository path for an artifact", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithInvalidType()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/foo/invalid-1.0.foo".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path artifact type does not corresspond to an artifact type",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not invalid-1.0.foo", "invalid-1.0.foo".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithNoExtension()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/jars/no-extension".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path filename does not have an extension",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'no-extension'", "no-extension".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithWrongExtension()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/jars/invalid-1.0.rar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path type does not match the extension",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1.0.rar'", "invalid-1.0.rar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithNoVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/jars/invalid.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path filename version is empty", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid.jar'", "invalid.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
-    }
-
-    public void testTextualVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "UNKNOWN" ) ) );
-    }
-
-    public void testArtifactWithClassifier()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
-    }
-
-    public void testJavaSourcesInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains(
-            createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
-    }
-
-    public void testDistributionInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check zip included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
-        assertTrue( "Check tar.gz included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
-    }
-
-    public void testSnapshotInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-        assertTrue( "Check snapshot included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
-    }
-
-    public void testSnapshotExclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-        assertFalse( "Check snapshot included",
-                     artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
-    }
-
-    public void testFileSet()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-            assertNotNull( "Check file is set", artifact.getFile() );
-        }
-    }
-
-    public void testRepositorySet()
-        throws MalformedURLException, DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        String url = repository.getUrl();
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-            assertNotNull( "Check repository set", artifact.getRepository() );
-            assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
-        }
-    }
-
-    public void testWrongArtifactPackaging()
-        throws ComponentLookupException, DiscovererException
-    {
-        try
-        {
-            discoverer.buildArtifact( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
-
-            fail( "Artifact should be null for wrong package extension" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testNoArtifactId()
-        throws DiscovererException
-    {
-        try
-        {
-            discoverer.buildArtifact( "groupId/jars/-1.0.jar" );
-
-            fail( "Artifact should be null when artifactId is missing" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-
-        try
-        {
-            discoverer.buildArtifact( "groupId/jars/1.0.jar" );
-
-            fail( "Artifact should be null when artifactId is missing" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testNoType()
-        throws ComponentLookupException, DiscovererException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
-            fail( "Artifact should be null for no type" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testSnapshot()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT" ), artifact );
-    }
-
-    public void testFinal()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606" ), artifact );
-    }
-
-    public void testNormal()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "javax.sql/jars/jdbc-2.0.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
-    }
-
-    public void testJavadoc()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc" ), artifact );
-    }
-
-    public void testSources()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "javax.sql/java-sources/jdbc-2.0-sources.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources" ), artifact );
-    }
-
-    public void testPlugin()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "maven/plugins/maven-test-plugin-1.8.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "maven", "maven-test-plugin", "1.8", "plugin" ), artifact );
-    }
-
-
-    private List discoverArtifacts()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() );
-    }
-
-    private List discoverArtifactsWithBlacklist()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, JAVAX_SQL_BLACKLIST, new SnapshotArtifactFilter() );
-    }
-
-    private List discoverArtifactsWithSnapshots()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() );
-    }
-}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/AbstractLayoutArtifactBuilderTestCase.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/AbstractLayoutArtifactBuilderTestCase.java
new file mode 100644 (file)
index 0000000..414bc96
--- /dev/null
@@ -0,0 +1,26 @@
+package org.apache.maven.archiva.discoverer.builders;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.PlexusTestCase;
+
+public class AbstractLayoutArtifactBuilderTestCase
+extends PlexusTestCase
+{
+
+    protected void assertArtifact( String groupId, String artifactId, String version, String type, String classifier, Artifact artifact )
+    {
+        assertNotNull( "Artifact cannot be null.", artifact );
+    
+        assertEquals( "Artifact groupId", groupId, artifact.getGroupId() );
+        assertEquals( "Artifact artifactId", artifactId, artifact.getArtifactId() );
+        assertEquals( "Artifact version", version, artifact.getVersion() );
+        assertEquals( "Artifact type", type, artifact.getType() );
+    
+        if ( StringUtils.isNotBlank( classifier ) )
+        {
+            assertEquals( "Artifact classifier", classifier, artifact.getClassifier() );
+        }
+    }
+    
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/DefaultLayoutArtifactBuilderTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/DefaultLayoutArtifactBuilderTest.java
new file mode 100644 (file)
index 0000000..e62200a
--- /dev/null
@@ -0,0 +1,216 @@
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+
+/**
+ * DefaultLayoutArtifactBuilderTest 
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DefaultLayoutArtifactBuilderTest
+    extends AbstractLayoutArtifactBuilderTestCase
+{
+    LayoutArtifactBuilder builder;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "default" );
+        assertNotNull( builder );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        if ( builder != null )
+        {
+            release( builder );
+        }
+        super.tearDown();
+    }
+
+    public void testPathDistributionArtifacts()
+        throws BuilderException, DiscovererException
+    {
+        assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0.tar.gz" ) );
+
+        assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0.zip" ) );
+    }
+
+    public void testPathNormal()
+        throws BuilderException, DiscovererException
+    {
+        assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder
+            .build( "/org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) );
+
+        assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder
+            .build( "org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, builder.build( "javax/sql/jdbc/2.0/jdbc-2.0.jar" ) );
+
+    }
+
+    public void testPathSnapshots()
+        throws BuilderException, DiscovererException
+    {
+        assertArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT", "jar", null, builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar" ) );
+
+        assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar" ) );
+    }
+
+    public void testPathSnapshotWithClassifier()
+        throws BuilderException, DiscovererException
+    {
+        assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar" ) );
+    }
+
+    public void testPathWithClassifier()
+        throws BuilderException, DiscovererException
+    {
+        assertArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", builder
+            .build( "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar" ) );
+    }
+
+    public void testPathWithJavaSourceInclusion()
+        throws BuilderException, DiscovererException
+    {
+        assertArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0-sources.jar" ) );
+    }
+
+    public void testProblemMissingType()
+        throws DiscovererException
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1/invalid-1" );
+            fail( "Should have detected missing type." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename does not have an extension.", e.getMessage() );
+        }
+    }
+
+    public void testProblemNonSnapshotInSnapshotDir()
+        throws DiscovererException
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
+            fail( "Non Snapshot artifact inside of an Snapshot dir is invalid." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", e.getMessage() );
+        }
+    }
+
+    public void testProblemPathTooShort()
+        throws DiscovererException
+    {
+        try
+        {
+            builder.build( "invalid/invalid-1.0.jar" );
+            fail( "Should have detected that path is too short." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path is too short to build an artifact from.", e.getMessage() );
+        }
+    }
+
+    public void testProblemTimestampSnapshotNotInSnapshotDir()
+        throws DiscovererException
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
+            fail( "Timestamped Snapshot artifact not inside of an Snapshot dir is invalid." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            // TODO: Is this really the right thing to do for this kind of artifact??
+            assertEquals( "Built snapshot artifact base version does not match path version: 1.0-SNAPSHOT; "
+                + "should have been version: 1.0-20050611.123456-1", e.getMessage() );
+        }
+    }
+
+    public void testProblemVersionPathMismatch()
+        throws DiscovererException
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0/invalid-2.0.jar" );
+            fail( "Should have detected version mismatch between path and artifact." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Built artifact version does not match path version", e.getMessage() );
+        }
+    }
+
+    public void testProblemVersionPathMismatchAlt()
+        throws DiscovererException
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0/invalid-1.0b.jar" );
+            fail( "Should have version mismatch between directory and artifact." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path version does not corresspond to an artifact version", e.getMessage() );
+        }
+    }
+
+    public void testProblemWrongArtifactId()
+        throws DiscovererException
+    {
+        try
+        {
+            builder.build( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
+            fail( "Should have detected wrong artifact Id." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename does not correspond to an artifact.", e.getMessage() );
+        }
+    }
+
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/LegacyLayoutArtifactBuilderTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/builders/LegacyLayoutArtifactBuilderTest.java
new file mode 100644 (file)
index 0000000..6c5818a
--- /dev/null
@@ -0,0 +1,162 @@
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+
+/**
+ * LegacyLayoutArtifactBuilderTest 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class LegacyLayoutArtifactBuilderTest
+    extends AbstractLayoutArtifactBuilderTestCase
+{
+    LayoutArtifactBuilder builder;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "legacy" );
+        assertNotNull( builder );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        if ( builder != null )
+        {
+            release( builder );
+        }
+        super.tearDown();
+    }
+
+    public void testPathNormal()
+        throws BuilderException, DiscovererException
+    {
+        Artifact artifact = builder.build( "javax.sql/jars/jdbc-2.0.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifact );
+    }
+
+    public void testPathFinal()
+        throws BuilderException, DiscovererException
+    {
+        Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar" );
+
+        assertArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606", "jar", null, artifact );
+    }
+
+    public void testPathSnapshot()
+        throws BuilderException, DiscovererException
+    {
+        Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar" );
+
+        assertArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT", "jar", null, artifact );
+    }
+
+    public void testPathJavadoc()
+        throws BuilderException, DiscovererException
+    {
+        Artifact artifact = builder.build( "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc", artifact );
+    }
+
+    public void testPathSources()
+        throws BuilderException, DiscovererException
+    {
+        Artifact artifact = builder.build( "javax.sql/java-sources/jdbc-2.0-sources.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources", artifact );
+    }
+
+    public void testPathPlugin()
+        throws BuilderException, DiscovererException
+    {
+        Artifact artifact = builder.build( "maven/plugins/maven-test-plugin-1.8.jar" );
+
+        assertArtifact( "maven", "maven-test-plugin", "1.8", "plugin", null, artifact );
+    }
+
+    public void testProblemNoType()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1/invalid-1" );
+
+            fail( "Should have detected no type." );
+        }
+        catch ( DiscovererException e )
+        {
+            /* expected path */
+            assertEquals( "Path does not match a legacy repository path for an artifact", e.getMessage() );
+        }
+    }
+
+    public void testProblemWrongArtifactPackaging()
+        throws ComponentLookupException, DiscovererException
+    {
+        try
+        {
+            builder.build( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
+
+            fail( "Should have detected wrong package extension." );
+        }
+        catch ( DiscovererException e )
+        {
+            /* expected path */
+            assertEquals( "Path type does not match the extension", e.getMessage() );
+        }
+    }
+
+    public void testProblemNoArtifactId()
+        throws DiscovererException
+    {
+        try
+        {
+            builder.build( "groupId/jars/-1.0.jar" );
+
+            fail( "Should have detected artifactId is missing" );
+        }
+        catch ( DiscovererException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename artifactId is empty", e.getMessage() );
+        }
+
+        try
+        {
+            builder.build( "groupId/jars/1.0.jar" );
+
+            fail( "Should have detected artifactId is missing" );
+        }
+        catch ( DiscovererException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename artifactId is empty", e.getMessage() );
+        }
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/AbstractConsumerTestCase.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/AbstractConsumerTestCase.java
new file mode 100644 (file)
index 0000000..c9b72b8
--- /dev/null
@@ -0,0 +1,76 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.discoverer.AbstractDiscovererTestCase;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+/**
+ * AbstractConsumerTestCase 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractConsumerTestCase
+    extends AbstractDiscovererTestCase
+{
+    protected ArtifactFactory artifactFactory;
+
+    protected Discoverer discoverer;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+        discoverer = (Discoverer) lookup( Discoverer.class.getName(), "default" );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        if ( discoverer != null )
+        {
+            release( discoverer );
+        }
+
+        if ( artifactFactory != null )
+        {
+            release( artifactFactory );
+        }
+        super.tearDown();
+    }
+
+    /**
+     * Builds the expected {@link Artifact} used by test assertions.
+     * A non-blank classifier routes through the classifier-aware factory method;
+     * otherwise a plain artifact is created with a hard-coded "runtime" scope
+     * (assumes the factory signature is (groupId, artifactId, version, scope, type)
+     * — TODO confirm against ArtifactFactory; scope is not asserted by the tests).
+     */
+    protected Artifact createArtifact( String groupId, String artifactId, String version, String type, String classifier )
+    {
+        if ( StringUtils.isNotBlank( classifier ) )
+        {
+            return artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
+        }
+        else
+        {
+            return artifactFactory.createArtifact( groupId, artifactId, version, "runtime", type );
+        }
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumerTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumerTest.java
new file mode 100644 (file)
index 0000000..249d990
--- /dev/null
@@ -0,0 +1,280 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.util.DirectoryScanner;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * GenericArtifactConsumerTest 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class GenericArtifactConsumerTest
+    extends AbstractConsumerTestCase
+{
+    private MockArtifactConsumer getMockArtifactConsumer() throws Exception
+    {
+        return (MockArtifactConsumer) lookup(DiscovererConsumer.ROLE, "mock-artifact");
+    }
+    
+    public void testScanLegacy()
+        throws Exception
+    {
+        ArtifactRepository repository = getLegacyRepository();
+        List consumers = new ArrayList();
+
+        MockArtifactConsumer mockConsumer = getMockArtifactConsumer(); 
+
+        consumers.add( mockConsumer );
+
+        DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+        assertNotNull( stats );
+
+        assertNotNull( consumers );
+
+        Iterator it = mockConsumer.getFailureMap().entrySet().iterator();
+        while ( it.hasNext() )
+        {
+            Map.Entry entry = (Entry) it.next();
+            String path = (String) entry.getKey();
+            String msg = (String) entry.getValue();
+            System.out.println( "Failure: " + path + " -> " + msg );
+        }
+
+        assertEquals( 3, mockConsumer.getFailureMap().size() );
+
+        assertEquals( "Path does not match a legacy repository path for an artifact", mockConsumer.getFailureMap()
+            .get( "invalid/invalid-1.0.jar" ) );
+        assertEquals( "Path filename version is empty", mockConsumer.getFailureMap().get( "invalid/jars/invalid.jar" ) );
+        assertEquals( "Path does not match a legacy repository path for an artifact", mockConsumer.getFailureMap()
+            .get( "invalid/jars/1.0/invalid-1.0.jar" ) );
+
+        assertEquals( 10, mockConsumer.getArtifactMap().size() );
+    }
+
+    public void testScanDefault()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+
+        MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+
+        consumers.add( mockConsumer );
+
+        DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+        // Test Statistics
+
+        assertNotNull( stats );
+
+        assertEquals( 31, stats.getFilesConsumed() );
+        assertEquals( 0, stats.getFilesSkipped() );
+        assertEquals( 31, stats.getFilesIncluded() );
+        assertTrue( stats.getElapsedMilliseconds() > 0 );
+        assertTrue( stats.getTimestampFinished() >= stats.getTimestampStarted() );
+        assertTrue( stats.getTimestampStarted() > 0 );
+
+        // Test gathered information from Mock consumer.
+
+        Iterator it;
+
+        assertNotNull( consumers );
+
+        it = mockConsumer.getFailureMap().entrySet().iterator();
+        while ( it.hasNext() )
+        {
+            Map.Entry entry = (Entry) it.next();
+            String path = (String) entry.getKey();
+            String msg = (String) entry.getValue();
+            System.out.println( "Failure: " + path + " -> " + msg );
+        }
+
+        assertEquals( 6, mockConsumer.getFailureMap().size() );
+
+        assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", mockConsumer
+            .getFailureMap().get( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" ) );
+        assertEquals( "Path is too short to build an artifact from.", mockConsumer.getFailureMap()
+            .get( "invalid/invalid-1.0.jar" ) );
+        assertEquals( "Built artifact version does not match path version", mockConsumer.getFailureMap()
+            .get( "invalid/invalid/1.0/invalid-2.0.jar" ) );
+
+        assertEquals( 25, mockConsumer.getArtifactMap().size() );
+
+        // Test for known include artifacts
+
+        Collection artifacts = mockConsumer.getArtifactMap().values();
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, artifacts );
+        assertHasArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", artifacts );
+
+        // Test for known excluded files and dirs to validate exclusions.
+
+        it = mockConsumer.getArtifactMap().values().iterator();
+        while ( it.hasNext() )
+        {
+            Artifact a = (Artifact) it.next();
+            assertTrue( "Artifact " + a + " should have it's .getFile() set.", a.getFile() != null );
+            assertTrue( "Artifact " + a + " should have it's .getRepository() set.", a.getRepository() != null );
+            assertTrue( "Artifact " + a + " should have non-null repository url.", a.getRepository().getUrl() != null );
+            assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
+            assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
+        }
+    }
+
+    private void assertHasArtifact( String groupId, String artifactId, String version, String type, String classifier,
+                                    Collection collection )
+    {
+        Artifact artifact = createArtifact( groupId, artifactId, version, type, classifier );
+        assertTrue( "Contains " + artifact, collection.contains( artifact ) );
+    }
+
+    /*  This relies on File.setLastModified(long) which does not work reliably on all platforms.
+     *  Notably linux and various early flavors of OSX.
+     *    - Joakim
+     *    
+     *  TODO: Research alternative way to test this.
+     */
+    public void disabledTestScanDefaultUpdatesOnly()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+
+        // Set all files in repository to August 22 1972 (old date)
+        DiscovererStatistics stats;
+        makeRepositoryOld( repository );
+        makeFileNew( repository, "org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom" );
+        makeFileNew( repository, "org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar" );
+
+        // Now do the normal thing.
+
+        List consumers = new ArrayList();
+
+        MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+
+        consumers.add( mockConsumer );
+
+        stats = discoverer.scanRepository( repository, consumers, true );
+
+        // Test Statistics
+
+        assertNotNull( stats );
+
+        assertEquals( 2, stats.getFilesConsumed() );
+        assertEquals( 23, stats.getFilesSkipped() );
+        assertEquals( 2, stats.getFilesIncluded() );
+        assertTrue( stats.getElapsedMilliseconds() > 0 );
+        assertTrue( stats.getTimestampFinished() >= stats.getTimestampStarted() );
+        assertTrue( stats.getTimestampStarted() > 0 );
+
+        // Test gathered information from Mock consumer.
+
+        Iterator it;
+
+        assertNotNull( consumers );
+
+        it = mockConsumer.getFailureMap().entrySet().iterator();
+        while ( it.hasNext() )
+        {
+            Map.Entry entry = (Entry) it.next();
+            String path = (String) entry.getKey();
+            String msg = (String) entry.getValue();
+            System.out.println( "Failure: " + path + " -> " + msg );
+        }
+
+        assertEquals( 6, mockConsumer.getFailureMap().size() );
+
+        assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", mockConsumer
+            .getFailureMap().get( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" ) );
+        assertEquals( "Path is too short to build an artifact from.", mockConsumer.getFailureMap()
+            .get( "invalid/invalid-1.0.jar" ) );
+        assertEquals( "Built artifact version does not match path version", mockConsumer.getFailureMap()
+            .get( "invalid/invalid/1.0/invalid-2.0.jar" ) );
+
+        assertEquals( 25, mockConsumer.getArtifactMap().size() );
+
+        // Test for known excluded files and dirs to validate exclusions.
+
+        it = mockConsumer.getArtifactMap().values().iterator();
+        while ( it.hasNext() )
+        {
+            Artifact a = (Artifact) it.next();
+            assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
+            assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
+        }
+    }
+
+    /** Touches the given repository-relative file so it appears freshly modified. */
+    private void makeFileNew( ArtifactRepository repository, String path )
+    {
+        File file = new File( repository.getBasedir(), path );
+        file.setLastModified( System.currentTimeMillis() );
+    }
+
+    /**
+     * Ages every file in the repository to a fixed old date (Aug 22 1972) and
+     * records a matching "last scan finished" timestamp, so that an
+     * updates-only scan can treat the whole repository as already processed.
+     *
+     * @param repository the repository whose files are aged in place
+     * @throws DiscovererException if the scan statistics cannot be saved
+     */
+    private void makeRepositoryOld( ArtifactRepository repository )
+        throws DiscovererException
+    {
+        Calendar cal = Calendar.getInstance();
+        cal.clear();
+        cal.set( 1972, Calendar.AUGUST, 22, 1, 1, 1 );
+        long oldTime = cal.getTimeInMillis();
+
+        // Pretend the last scan finished shortly after the old timestamp.
+        DiscovererStatistics stats = new DiscovererStatistics( repository );
+        stats.setTimestampFinished( oldTime + 5000 );
+        stats.save();
+
+        DirectoryScanner scanner = new DirectoryScanner();
+        scanner.setBasedir( repository.getBasedir() );
+        scanner.addDefaultExcludes();
+        scanner.setIncludes( new String[] { "**/*" } );
+        scanner.scan();
+        String files[] = scanner.getIncludedFiles();
+        for ( int i = 0; i < files.length; i++ )
+        {
+            // getIncludedFiles() returns paths relative to the basedir; they must
+            // be resolved against it.  The previous new File( files[i] ) resolved
+            // against the CWD, so setLastModified() always returned false and the
+            // test failed spuriously on every platform.
+            File file = new File( repository.getBasedir(), files[i] );
+
+            if ( !file.setLastModified( oldTime ) )
+            {
+                fail( "Your platform apparently does not support the File.setLastModified(long) method." );
+            }
+        }
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumerTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumerTest.java
new file mode 100644 (file)
index 0000000..3b6df1d
--- /dev/null
@@ -0,0 +1,126 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * GenericModelConsumerTest 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class GenericModelConsumerTest
+    extends AbstractConsumerTestCase
+{
+    private MockModelConsumer getMockModelConsumer() throws Exception
+    {
+        return (MockModelConsumer) lookup(DiscovererConsumer.ROLE, "mock-model");
+    }
+    
+    public void testScanLegacy()
+        throws Exception
+    {
+        ArtifactRepository repository = getLegacyRepository();
+        List consumers = new ArrayList();
+
+        MockModelConsumer mockConsumer = getMockModelConsumer();
+
+        consumers.add( mockConsumer );
+
+        DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+        assertNotNull( stats );
+
+        assertNotNull( consumers );
+
+        Iterator it = mockConsumer.getModelMap().entrySet().iterator();
+        while ( it.hasNext() )
+        {
+            Map.Entry entry = (Entry) it.next();
+            String path = (String) entry.getKey();
+            Model model = (Model) entry.getValue();
+            System.out.println( "Model: " + path + " -> " + model );
+        }
+
+        // TODO: Add some poms to legacy repository!
+        assertEquals( 0, mockConsumer.getModelMap().size() );
+    }
+
+    public void testScanDefault()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+
+        MockModelConsumer mockConsumer = getMockModelConsumer();
+
+        consumers.add( mockConsumer );
+
+        DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+        // Test Statistics
+
+        assertNotNull( stats );
+
+        assertEquals( 10, stats.getFilesConsumed() );
+        assertEquals( 0, stats.getFilesSkipped() );
+        assertEquals( 10, stats.getFilesIncluded() );
+        assertTrue( stats.getElapsedMilliseconds() > 0 );
+        assertTrue( stats.getTimestampFinished() >= stats.getTimestampStarted() );
+        assertTrue( stats.getTimestampStarted() > 0 );
+
+        // Test gathered information from Mock consumer.
+
+        Iterator it;
+
+        it = mockConsumer.getModelMap().entrySet().iterator();
+        while ( it.hasNext() )
+        {
+            Map.Entry entry = (Entry) it.next();
+            String path = (String) entry.getKey();
+            Model model = (Model) entry.getValue();
+            System.out.println( "Model: " + path + " -> " + model );
+        }
+
+        assertEquals( 10, mockConsumer.getModelMap().size() );
+
+        // Test for known include metadata
+
+        // Test for known excluded files and dirs to validate exclusions.
+
+        it = mockConsumer.getModelMap().keySet().iterator();
+        while ( it.hasNext() )
+        {
+            String path = (String) it.next();
+            assertFalse( "Check not CVS", path.indexOf( "CVS" ) >= 0 );
+            assertFalse( "Check not .svn", path.indexOf( ".svn" ) >= 0 );
+        }
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumerTest.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumerTest.java
new file mode 100644 (file)
index 0000000..c62f074
--- /dev/null
@@ -0,0 +1,118 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * GenericRepositoryMetadataConsumerTest 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class GenericRepositoryMetadataConsumerTest
+    extends AbstractConsumerTestCase
+{
+    private MockRepositoryMetadataConsumer getMockRepositoryMetadataConsumer() throws Exception
+    {
+        return (MockRepositoryMetadataConsumer) lookup(DiscovererConsumer.ROLE, "mock-metadata");
+    }
+    
+    public void testScanLegacy()
+        throws Exception
+    {
+        ArtifactRepository repository = getLegacyRepository();
+        List consumers = new ArrayList();
+
+        MockRepositoryMetadataConsumer mockConsumer = getMockRepositoryMetadataConsumer();
+
+        consumers.add( mockConsumer );
+
+        try
+        {
+            discoverer.scanRepository( repository, consumers, true );
+            fail( "Should not have worked on a legacy repository." );
+        }
+        catch ( IllegalStateException e )
+        {
+            /* expected path */
+        }
+    }
+
+    public void testScanDefault()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+
+        MockRepositoryMetadataConsumer mockConsumer = getMockRepositoryMetadataConsumer();
+
+        consumers.add( mockConsumer );
+
+        DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+        // Test Statistics
+
+        assertNotNull( stats );
+
+        assertEquals( 7, stats.getFilesConsumed() );
+        assertEquals( 0, stats.getFilesSkipped() );
+        assertEquals( 7, stats.getFilesIncluded() );
+        assertTrue( stats.getElapsedMilliseconds() > 0 );
+        assertTrue( stats.getTimestampFinished() >= stats.getTimestampStarted() );
+        assertTrue( stats.getTimestampStarted() > 0 );
+
+        // Test gathered information from Mock consumer.
+
+        Iterator it;
+
+        it = mockConsumer.getRepositoryMetadataMap().entrySet().iterator();
+        while ( it.hasNext() )
+        {
+            Map.Entry entry = (Entry) it.next();
+            String path = (String) entry.getKey();
+            RepositoryMetadata repometa = (RepositoryMetadata) entry.getValue();
+            System.out.println( "Metadata: " + path + " -> " + repometa );
+        }
+
+        assertEquals( 5, mockConsumer.getRepositoryMetadataMap().size() );
+
+        // Test for known include metadata
+
+        // Test for known excluded files and dirs to validate exclusions.
+
+        it = mockConsumer.getRepositoryMetadataMap().keySet().iterator();
+        while ( it.hasNext() )
+        {
+            String path = (String) it.next();
+            assertFalse( "Check not CVS", path.indexOf( "CVS" ) >= 0 );
+            assertFalse( "Check not .svn", path.indexOf( ".svn" ) >= 0 );
+        }
+    }
+}
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockArtifactConsumer.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockArtifactConsumer.java
new file mode 100644 (file)
index 0000000..d6b5e18
--- /dev/null
@@ -0,0 +1,67 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.artifact.Artifact;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockArtifactConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumers"
+ *     role-hint="mock-artifact"
+ *     instantiation-strategy="per-lookup"
+ */
+public class MockArtifactConsumer
+    extends GenericArtifactConsumer
+{
+    private Map artifactMap = new HashMap();
+
+    private Map failureMap = new HashMap();
+
+    public void processArtifact( Artifact artifact, File file )
+    {
+        String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+        artifactMap.put( relpath, artifact );
+    }
+
+    public void processArtifactBuildFailure( File path, String message )
+    {
+        String relpath = PathUtil.getRelative( repository.getBasedir(), path );
+        failureMap.put( relpath, message );
+    }
+
+    public Map getArtifactMap()
+    {
+        return artifactMap;
+    }
+
+    public Map getFailureMap()
+    {
+        return failureMap;
+    }
+}
\ No newline at end of file
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockModelConsumer.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockModelConsumer.java
new file mode 100644 (file)
index 0000000..5e47eed
--- /dev/null
@@ -0,0 +1,54 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.model.Model;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockModelConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ *     role-hint="mock-model"
+ *     instantiation-strategy="per-lookup"
+ */
+public class MockModelConsumer
+    extends GenericModelConsumer
+{
+    private Map modelMap = new HashMap();
+
+    public void processModel( Model model, File file )
+    {
+        String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+        modelMap.put( relpath, model );
+    }
+
+    public Map getModelMap()
+    {
+        return modelMap;
+    }
+}
\ No newline at end of file
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockRepositoryMetadataConsumer.java b/archiva-MRM-239/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/consumers/MockRepositoryMetadataConsumer.java
new file mode 100644 (file)
index 0000000..bd32980
--- /dev/null
@@ -0,0 +1,54 @@
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockRepositoryMetadataConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ *     role-hint="mock-metadata"
+ *     instantiation-strategy="per-lookup"
+ */
+public class MockRepositoryMetadataConsumer
+    extends GenericRepositoryMetadataConsumer
+{
+    private Map repositoryMetadataMap = new HashMap();
+
+    public void processRepositoryMetadata( RepositoryMetadata metadata, File file )
+    {
+        String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+        repositoryMetadataMap.put( relpath, metadata );
+    }
+
+    public Map getRepositoryMetadataMap()
+    {
+        return repositoryMetadataMap;
+    }
+}
\ No newline at end of file
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumerTest.xml b/archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericArtifactConsumerTest.xml
new file mode 100644 (file)
index 0000000..fc46adc
--- /dev/null
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+      <role-hint>mock-artifact</role-hint>
+      <implementation>org.apache.maven.archiva.discoverer.consumers.MockArtifactConsumer</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+        </requirement>
+      </requirements>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumerTest.xml b/archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericModelConsumerTest.xml
new file mode 100644 (file)
index 0000000..2ded1b6
--- /dev/null
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+      <role-hint>mock-model</role-hint>
+      <implementation>org.apache.maven.archiva.discoverer.consumers.MockModelConsumer</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+        </requirement>
+      </requirements>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
diff --git a/archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumerTest.xml b/archiva-MRM-239/archiva-discoverer/src/test/resources/org/apache/maven/archiva/discoverer/consumers/GenericRepositoryMetadataConsumerTest.xml
new file mode 100644 (file)
index 0000000..da9864d
--- /dev/null
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+      <role-hint>mock-metadata</role-hint>
+      <implementation>org.apache.maven.archiva.discoverer.consumers.MockRepositoryMetadataConsumer</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+        </requirement>
+      </requirements>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
index 9535a6c70a9c7d7444fff59323c01e1bc7139c84..b4bdbd4f7c62fbc3852d30109a7e1ef71f61e5eb 100644 (file)
@@ -21,6 +21,7 @@ package org.apache.maven.archiva.indexer;
 
 import org.apache.maven.archiva.indexer.query.Query;
 import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
 
 import java.util.Collection;
 import java.util.List;
@@ -89,6 +90,17 @@ public interface RepositoryArtifactIndex
     Collection getAllRecordKeys()
         throws RepositoryIndexException;
 
+    /**
+     * Indexes the artifact specified. If the artifact is already in the repository then it is updated. 
+     * This method should use less memory than indexRecords as the record can be created and disposed of on the fly.
+     *
+     * @param artifact  the artifact to index
+     * @param factory   the factory used to create an index record from the artifact
+     * @throws RepositoryIndexException if there is a problem indexing the artifact
+     */
+    void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+        throws RepositoryIndexException;
+    
     /**
      * Indexes the artifacts found within the specified list. If the artifacts are already in the
      * repository they are updated. This method should use less memory than indexRecords as the records can be
index 5b5f68beacf69ffe5e6c930502bc2b6a64e55054..c0a02935dd57a297e5b4279ee4e0bf8b42e6d0b1 100644 (file)
@@ -351,6 +351,40 @@ public class LuceneRepositoryArtifactIndex
             lastUpdatedTime = System.currentTimeMillis();
         }
     }
+    
+    public void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+        throws RepositoryIndexException
+    {
+        IndexModifier indexModifier = null;
+        try
+        {
+            indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
+
+            RepositoryIndexRecord record = factory.createRecord( artifact );
+
+            if ( record != null )
+            {
+                Term term = new Term( FLD_PK, record.getPrimaryKey() );
+
+                indexModifier.deleteDocuments( term );
+
+                Document document = converter.convert( record );
+                document.add( new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+
+                indexModifier.addDocument( document );
+            }
+            indexModifier.optimize();
+        }
+        catch ( IOException e )
+        {
+            throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
+        }
+        finally
+        {
+            closeQuietly( indexModifier );
+            lastUpdatedTime = System.currentTimeMillis();
+        }
+    }    
 
     public List getAllGroupIds()
         throws RepositoryIndexException
index 2f0dd064427939837ff8cbda81df5ea930f96347..24eb199548a9db8ed4f35740ce114396cced26a5 100755 (executable)
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.codehaus.plexus</groupId>
+      <artifactId>plexus-jdo2</artifactId>
+      <version>1.0-alpha-8</version>
+      <exclusions>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xmlParserAPIs</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>jpox</groupId>
+      <artifactId>jpox</artifactId>
+      <version>1.1.6</version>
+      <scope>compile</scope>
+      <exclusions>
+        <!-- targeting JDK 1.4 we don't need this -->
+        <exclusion>
+          <groupId>javax.sql</groupId>
+          <artifactId>jdbc-stdext</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <!--  TEST DEPS -->
+    <dependency>
+      <groupId>hsqldb</groupId>
+      <artifactId>hsqldb</artifactId>
+      <version>1.7.3.3</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <build>
     <plugins>
         <groupId>org.codehaus.modello</groupId>
         <artifactId>modello-maven-plugin</artifactId>
         <version>1.0-alpha-14-SNAPSHOT</version>
+        <configuration>
+          <version>1.0.0</version>
+          <packageWithVersion>false</packageWithVersion>
+          <model>src/main/mdo/reporting.mdo</model>
+        </configuration>
         <executions>
           <execution>
+            <id>modello-java</id>
             <goals>
-              <goal>xpp3-writer</goal>
               <goal>java</goal>
+              <goal>jpox-metadata-class</goal>
+              <!--
+              <goal>xpp3-writer</goal>
               <goal>xpp3-reader</goal>
+               -->
+            </goals>
+          </execution>
+          <execution>
+            <id>jpox-jdo-mapping</id>
+            <goals>
+              <goal>jpox-jdo-mapping</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${basedir}/target/classes/org/apache/maven/archiva/reporting/model/</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+       <groupId>org.codehaus.mojo</groupId>
+        <artifactId>jpox-maven-plugin</artifactId>
+        <version>1.1.6-SNAPSHOT</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>enhance</goal>
             </goals>
           </execution>
         </executions>
-        <configuration>
-          <version>1.0.0</version>
-          <model>src/main/mdo/reporting.mdo</model>
-        </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java
new file mode 100644 (file)
index 0000000..2854bef
--- /dev/null
@@ -0,0 +1,50 @@
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ReportingException 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ReportingException
+    extends Exception
+{
+
+    public ReportingException()
+    {
+    }
+
+    public ReportingException( String message )
+    {
+        super( message );
+    }
+
+    public ReportingException( Throwable cause )
+    {
+        super( cause );
+    }
+
+    public ReportingException( String message, Throwable cause )
+    {
+        super( message, cause );
+    }
+}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java
new file mode 100644 (file)
index 0000000..31fc3e0
--- /dev/null
@@ -0,0 +1,237 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
+
+import java.util.List;
+
+import javax.jdo.Extent;
+import javax.jdo.JDOException;
+import javax.jdo.JDOHelper;
+import javax.jdo.JDOObjectNotFoundException;
+import javax.jdo.JDOUserException;
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
+import javax.jdo.Query;
+import javax.jdo.Transaction;
+
+/**
+ * AbstractJdoResults - Base class for all JDO related results.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractJdoDatabase
+    implements Initializable
+{
+    /**
+     * @plexus.requirement role-hint="archiva"
+     */
+    private JdoFactory jdoFactory;
+
+    private PersistenceManagerFactory pmf;
+
+    // -------------------------------------------------------------------
+    // JPOX / JDO Specifics.
+    // -------------------------------------------------------------------
+
+    protected List getAllObjects( Class clazz, String ordering )
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            Extent extent = pm.getExtent( clazz, true );
+
+            Query query = pm.newQuery( extent );
+
+            if ( ordering != null )
+            {
+                query.setOrdering( ordering );
+            }
+
+//            for ( Iterator i = fetchGroups.iterator(); i.hasNext(); )
+//            {
+//                pm.getFetchPlan().addGroup( (String) i.next() );
+//            }
+
+            List result = (List) query.execute();
+
+            result = (List) pm.detachCopyAll( result );
+
+            tx.commit();
+
+            return result;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    protected Object getObjectByKey( Class clazz, Object key )
+        throws JDOObjectNotFoundException, JDOException
+    {
+        if ( key == null )
+        {
+            throw new JDOException( "Unable to get object from jdo using null key." );
+        }
+
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            //            if ( fetchGroup != null )
+            //            {
+            //                pm.getFetchPlan().addGroup( fetchGroup );
+            //            }
+
+            Object objectId = pm.newObjectIdInstance( clazz, key.toString() );
+
+            Object object = pm.getObjectById( objectId );
+
+            object = pm.detachCopy( object );
+
+            tx.commit();
+
+            return object;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    public void initialize()
+        throws InitializationException
+    {
+        pmf = jdoFactory.getPersistenceManagerFactory();
+    }
+
+    protected void removeObject( Object o )
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            o = pm.getObjectById( pm.getObjectId( o ) );
+
+            pm.deletePersistent( o );
+
+            tx.commit();
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    protected Object saveObject( Object object )
+    {
+        return saveObject( object, null );
+    }
+
+    protected Object saveObject( Object object, String fetchGroups[] )
+        throws JDOException
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            if ( ( JDOHelper.getObjectId( object ) != null ) && !JDOHelper.isDetached( object ) )
+            {
+                throw new JDOException( "Existing object is not detached: " + object );
+            }
+
+            if ( fetchGroups != null )
+            {
+                for ( int i = 0; i < fetchGroups.length; i++ )
+                {
+                    pm.getFetchPlan().addGroup( fetchGroups[i] );
+                }
+            }
+
+            pm.makePersistent( object );
+
+            object = pm.detachCopy( object );
+
+            tx.commit();
+
+            return object;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    private PersistenceManager getPersistenceManager()
+    {
+        PersistenceManager pm = pmf.getPersistenceManager();
+
+        pm.getFetchPlan().setMaxFetchDepth( -1 );
+
+        return pm;
+    }
+
+    protected static void closePersistenceManager( PersistenceManager pm )
+    {
+        try
+        {
+            pm.close();
+        }
+        catch ( JDOUserException e )
+        {
+            // ignore
+        }
+    }
+
+    protected static void rollbackIfActive( Transaction tx )
+    {
+        PersistenceManager pm = tx.getPersistenceManager();
+
+        try
+        {
+            if ( tx.isActive() )
+            {
+                tx.rollback();
+            }
+        }
+        finally
+        {
+            closePersistenceManager( pm );
+        }
+    }
+}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java
new file mode 100644 (file)
index 0000000..0eee93b
--- /dev/null
@@ -0,0 +1,80 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.Result;
+
+/**
+ * AbstractResultsDatabase 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractResultsDatabase
+    extends AbstractJdoDatabase
+{
+    /**
+     * <p>
+     * Get the number of failures in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of failures in the database.
+     */
+    public abstract int getNumFailures();
+
+    /**
+     * <p>
+     * Get the number of warnings in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of warnings in the database.
+     */
+    public abstract int getNumWarnings();
+    
+    /**
+     * <p>
+     * Get the number of notices in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of notices in the database.
+     */
+    public abstract int getNumNotices();
+    
+    protected static Result createResult( String processor, String problem, String reason )
+    {
+        Result result = new Result();
+        result.setProcessor( processor );
+        result.setProblem( problem );
+        result.setReason( reason );
+        return result;
+    }
+}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java
new file mode 100644 (file)
index 0000000..94876d5
--- /dev/null
@@ -0,0 +1,250 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.ArtifactResultsKey;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.jdo.JDOObjectNotFoundException;
+
+/**
+ * ArtifactResultsDatabase - Database of ArtifactResults. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase"
+ */
+public class ArtifactResultsDatabase
+    extends AbstractResultsDatabase
+{
+    // -------------------------------------------------------------------
+    // ArtifactResults methods.
+    // -------------------------------------------------------------------
+
+    public static final String ROLE = ArtifactResultsDatabase.class.getName();
+
+    public void addFailure( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getFailures().contains( result ) )
+        {
+            results.addFailure( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void addNotice( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getNotices().contains( result ) )
+        {
+            results.addNotice( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void addWarning( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getWarnings().contains( result ) )
+        {
+            results.addWarning( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void clearResults( ArtifactResults results )
+    {
+        results.getFailures().clear();
+        results.getWarnings().clear();
+        results.getNotices().clear();
+
+        saveObject( results );
+    }
+
+    public List getAllArtifactResults()
+    {
+        return getAllObjects( ArtifactResults.class, null );
+    }
+
+    public Iterator getIterator()
+    {
+        List allartifacts = getAllArtifactResults();
+        if ( allartifacts == null )
+        {
+            return Collections.EMPTY_LIST.iterator();
+        }
+
+        return allartifacts.iterator();
+    }
+
+    public void remove( ArtifactResults results )
+    {
+        removeObject( results );
+    }
+
+    public void remove( Artifact artifact )
+    {
+        try
+        {
+            ArtifactResults results = lookupArtifactResults( artifact );
+            remove( results );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            // nothing to do.
+        }
+    }
+
+    /**
+     * Get an {@link ArtifactResults} from the store.
+     * If the store does not have one, create it.
+     * 
+     * Equivalent to calling {@link #lookupArtifactResults(Artifact)} then if
+     * not found, using {@link #createArtifactResults(Artifact)}.
+     * 
+     * @param artifact the artifact information
+     * @return the ArtifactResults object (may not be in database yet, so don't forget to {@link #saveObject(Object)})
+     * @see #lookupArtifactResults(Artifact)
+     * @see #createArtifactResults(Artifact)
+     */
+    public ArtifactResults getArtifactResults( Artifact artifact )
+    {
+        ArtifactResults results;
+
+        try
+        {
+            results = lookupArtifactResults( artifact );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            results = createArtifactResults( artifact );
+        }
+
+        return results;
+    }
+
+    /**
+     * Create a new {@link ArtifactResults} object from the provided Artifact information.
+     * 
+     * @param artifact the artifact information.
+     * @return the new {@link ArtifactResults} object.
+     * @see #getArtifactResults(Artifact)
+     * @see #lookupArtifactResults(Artifact)
+     */
+    private ArtifactResults createArtifactResults( Artifact artifact )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version,
+         * type, classifier.
+         * This also means that none of those fields may be null.  however, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        ArtifactResults results = new ArtifactResults();
+        results.setGroupId( StringUtils.defaultString( artifact.getGroupId() ) );
+        results.setArtifactId( StringUtils.defaultString( artifact.getArtifactId() ) );
+        results.setVersion( StringUtils.defaultString( artifact.getVersion() ) );
+        results.setType( StringUtils.defaultString( artifact.getType() ) );
+        results.setClassifier( StringUtils.defaultString( artifact.getClassifier() ) );
+
+        return results;
+    }
+
+    /**
+     * Lookup the {@link ArtifactResults} in the JDO store from the information in
+     * the provided Artifact.
+     * 
+     * @param artifact the artifact information.
+     * @return the previously saved {@link ArtifactResults} from the JDO store.
+     * @throws JDOObjectNotFoundException if the {@link ArtifactResults} are not found.
+     * @see #getArtifactResults(Artifact)
+     * @see #createArtifactResults(Artifact)
+     */
+    private ArtifactResults lookupArtifactResults( Artifact artifact )
+        throws JDOObjectNotFoundException
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version,
+         * type, classifier.
+         * This also means that none of those fields may be null.  however, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        ArtifactResultsKey key = new ArtifactResultsKey();
+        key.groupId = StringUtils.defaultString( artifact.getGroupId() );
+        key.artifactId = StringUtils.defaultString( artifact.getArtifactId() );
+        key.version = StringUtils.defaultString( artifact.getVersion() );
+        key.type = StringUtils.defaultString( artifact.getType() );
+        key.classifier = StringUtils.defaultString( artifact.getClassifier() );
+
+        return (ArtifactResults) getObjectByKey( ArtifactResults.class, key );
+    }
+
+    public int getNumFailures()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getFailures().size();
+        }
+        return count;
+    }
+
+    public int getNumNotices()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getNotices().size();
+        }
+        return count;
+    }
+
+    public int getNumWarnings()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getWarnings().size();
+        }
+        return count;
+    }
+}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java
new file mode 100644 (file)
index 0000000..82e62f1
--- /dev/null
@@ -0,0 +1,209 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.MetadataResultsKey;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.jdo.JDOObjectNotFoundException;
+
+/**
+ * MetadataResultsDatabase - JDO-backed store of report results (failures, warnings, notices) for repository metadata.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.MetadataResultsDatabase"
+ */
+public class MetadataResultsDatabase
+    extends AbstractResultsDatabase
+{
+    public static final String ROLE = MetadataResultsDatabase.class.getName();
+
+    public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getFailures().contains( result ) )
+        {
+            results.addFailure( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getWarnings().contains( result ) )
+        {
+            results.addWarning( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getNotices().contains( result ) )
+        {
+            results.addNotice( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void clearResults( MetadataResults results )
+    {
+        results.getFailures().clear();
+        results.getWarnings().clear();
+        results.getNotices().clear();
+
+        saveObject( results );
+    }
+
+    public List getAllMetadataResults()
+    {
+        return getAllObjects( MetadataResults.class, null );
+    }
+
+    public Iterator getIterator()
+    {
+        List allmetadatas = getAllMetadataResults();
+        if ( allmetadatas == null )
+        {
+            return Collections.EMPTY_LIST.iterator();
+        }
+
+        return allmetadatas.iterator();
+    }
+
+    public void remove( MetadataResults results )
+    {
+        removeObject( results );
+    }
+
+    public void remove( RepositoryMetadata metadata )
+    {
+        try
+        {
+            MetadataResults results = lookupMetadataResults( metadata );
+            remove( results );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            // nothing to do.
+        }
+    }
+
+    public MetadataResults getMetadataResults( RepositoryMetadata metadata )
+    {
+        MetadataResults results;
+
+        try
+        {
+            results = lookupMetadataResults( metadata );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            results = createMetadataResults( metadata );
+        }
+
+        return results;
+    }
+
+    private MetadataResults createMetadataResults( RepositoryMetadata metadata )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The MetadataResults object has a composite primary key consisting of groupId, artifactId, and version.
+         * This also means that none of those fields may be null.  However, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        MetadataResults results = new MetadataResults();
+        results.setGroupId( StringUtils.defaultString( metadata.getGroupId() ) );
+        results.setArtifactId( StringUtils.defaultString( metadata.getArtifactId() ) );
+        results.setVersion( StringUtils.defaultString( metadata.getBaseVersion() ) );
+
+        return results;
+    }
+
+    private MetadataResults lookupMetadataResults( RepositoryMetadata metadata )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The MetadataResults object has a composite primary key consisting of groupId, artifactId, and version.
+         * This also means that none of those fields may be null.  However, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        MetadataResultsKey key = new MetadataResultsKey();
+        key.groupId = StringUtils.defaultString( metadata.getGroupId(), "" );
+        key.artifactId = StringUtils.defaultString( metadata.getArtifactId(), "" );
+        key.version = StringUtils.defaultString( metadata.getBaseVersion(), "" );
+
+        return (MetadataResults) getObjectByKey( MetadataResults.class, key );
+    }
+
+    public int getNumFailures()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getFailures().size();
+        }
+        return count;
+    }
+
+    public int getNumNotices()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getNotices().size();
+        }
+        return count;
+    }
+
+    public int getNumWarnings()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getWarnings().size();
+        }
+        return count;
+    }
+}
index 201ce4ba477f2db4c2ad0ba1616172924a9d87e5..4dd2430a55996b4239ed40805e4a61ca74e976ef 100644 (file)
@@ -19,618 +19,106 @@ package org.apache.maven.archiva.reporting.database;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.Date;
-import java.util.HashMap;
 import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Set;
 
 /**
+ * The Main Reporting Database.
+ * 
  * @todo i18n, including message formatting and parameterisation
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.ReportingDatabase"
  */
 public class ReportingDatabase
 {
-    private final Reporting reporting;
-
-    private Map artifactMap;
-
-    private Map metadataMap;
-
-    private int numFailures;
-
-    private int numWarnings;
-
-    private ArtifactRepository repository;
-
-    private boolean inProgress;
-
-    private long startTime;
-
-    private final ReportGroup reportGroup;
-
-    private Set metadataWithProblems;
-
-    private Map filteredDatabases = new HashMap();
-
-    private int numNotices;
-
-    public ReportingDatabase( ReportGroup reportGroup )
-    {
-        this( reportGroup, new Reporting() );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, Reporting reporting )
-    {
-        this( reportGroup, reporting, null );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, ArtifactRepository repository )
-    {
-        this( reportGroup, new Reporting(), repository );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, Reporting reporting, ArtifactRepository repository )
-    {
-        this.reportGroup = reportGroup;
-
-        this.reporting = reporting;
-
-        this.repository = repository;
-
-        initArtifactMap();
-
-        initMetadataMap();
-    }
-
-    public void addFailure( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getFailures().contains( result ) )
-        {
-            results.addFailure( result );
-            numFailures++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addFailure( artifact, processor, problem, reason );
-        }
-    }
-
-    public void addNotice( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getNotices().contains( result ) )
-        {
-            results.addNotice( result );
-            numNotices++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addNotice( artifact, processor, problem, reason );
-        }
-    }
-
-    public void addWarning( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getWarnings().contains( result ) )
-        {
-            results.addWarning( result );
-            numWarnings++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addWarning( artifact, processor, problem, reason );
-        }
-    }
-
-    ArtifactResults getArtifactResults( Artifact artifact )
-    {
-        return getArtifactResults( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
-                                   artifact.getType(), artifact.getClassifier() );
-    }
-
-    private ArtifactResults getArtifactResults( String groupId, String artifactId, String version, String type,
-                                                String classifier )
-    {
-        Map artifactMap = this.artifactMap;
-
-        String key = getArtifactKey( groupId, artifactId, version, type, classifier );
-        ArtifactResults results = (ArtifactResults) artifactMap.get( key );
-        if ( results == null )
-        {
-            results = new ArtifactResults();
-            results.setArtifactId( artifactId );
-            results.setClassifier( classifier );
-            results.setGroupId( groupId );
-            results.setType( type );
-            results.setVersion( version );
-
-            artifactMap.put( key, results );
-            reporting.getArtifacts().add( results );
-        }
-
-        return results;
-    }
-
-    private void initArtifactMap()
-    {
-        Map map = new HashMap();
-        for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
-        {
-            ArtifactResults result = (ArtifactResults) i.next();
-
-            String key = getArtifactKey( result.getGroupId(), result.getArtifactId(), result.getVersion(),
-                                         result.getType(), result.getClassifier() );
-            map.put( key, result );
-
-            numFailures += result.getFailures().size();
-            numWarnings += result.getWarnings().size();
-            numNotices += result.getNotices().size();
-        }
-        artifactMap = map;
-    }
-
-    private static String getArtifactKey( String groupId, String artifactId, String version, String type,
-                                          String classifier )
-    {
-        return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
-    }
-
-    private static Result createResult( String processor, String problem, String reason )
-    {
-        Result result = new Result();
-        result.setProcessor( processor );
-        result.setProblem( problem );
-        result.setReason( reason );
-        return result;
-    }
-
-    public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getFailures().contains( result ) )
-        {
-            results.addFailure( result );
-            numFailures++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addFailure( metadata, processor, problem, reason );
-        }
-    }
-
-    public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getWarnings().contains( result ) )
-        {
-            results.addWarning( result );
-            numWarnings++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addWarning( metadata, processor, problem, reason );
-        }
-    }
-
-    public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getNotices().contains( result ) )
-        {
-            results.addNotice( result );
-            numNotices++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addNotice( metadata, processor, problem, reason );
-        }
-    }
-
-    public Set getMetadataWithProblems()
-    {
-        return metadataWithProblems;
-    }
-
-    private void initMetadataMap()
-    {
-        Map map = new HashMap();
-        Set problems = new LinkedHashSet();
-
-        for ( Iterator i = reporting.getMetadata().iterator(); i.hasNext(); )
-        {
-            MetadataResults result = (MetadataResults) i.next();
-
-            String key = getMetadataKey( result.getGroupId(), result.getArtifactId(), result.getVersion() );
-
-            map.put( key, result );
-
-            numFailures += result.getFailures().size();
-            numWarnings += result.getWarnings().size();
-            numNotices += result.getNotices().size();
-
-            if ( !result.getFailures().isEmpty() || !result.getWarnings().isEmpty() || !result.getNotices().isEmpty() )
-            {
-                problems.add( result );
-            }
-        }
-        metadataMap = map;
-        metadataWithProblems = problems;
-    }
+    public static final String ROLE = ReportingDatabase.class.getName();
 
-    private static String getMetadataKey( String groupId, String artifactId, String version )
-    {
-        return groupId + ":" + artifactId + ":" + version;
-    }
-
-    public int getNumFailures()
-    {
-        return numFailures;
-    }
-
-    public int getNumWarnings()
-    {
-        return numWarnings;
-    }
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase artifactDatabase;
 
-    public Reporting getReporting()
-    {
-        return reporting;
-    }
+    /**
+     * @plexus.requirement
+     */
+    private MetadataResultsDatabase metadataDatabase;
 
     public Iterator getArtifactIterator()
     {
-        return reporting.getArtifacts().iterator();
+        return artifactDatabase.getIterator();
     }
 
     public Iterator getMetadataIterator()
     {
-        return reporting.getMetadata().iterator();
+        return metadataDatabase.getIterator();
     }
 
-    public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp )
+    public void clear()
     {
-        String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
-        Map map = metadataMap;
-        MetadataResults results = (MetadataResults) map.get( key );
-        return results != null && results.getLastModified() >= timestamp;
     }
 
     /**
-     * Make sure the metadata record exists, but remove any previous reports in preparation for adding new ones.
-     *
-     * @param metadata     the metadata
-     * @param lastModified the modification time of the file being tracked
+     * <p>
+     * Get the number of failures in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of failures in the database.
      */
-    public void cleanMetadata( RepositoryMetadata metadata, long lastModified )
-    {
-        MetadataResults results = getMetadataResults( metadata, lastModified );
-
-        results.setLastModified( lastModified );
-
-        numFailures -= results.getFailures().size();
-        results.getFailures().clear();
-
-        numWarnings -= results.getWarnings().size();
-        results.getWarnings().clear();
-
-        numNotices -= results.getWarnings().size();
-        results.getNotices().clear();
-
-        metadataWithProblems.remove( results );
-    }
-
-    MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified )
-    {
-        return getMetadataResults( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion(),
-                                   lastModified );
-    }
-
-    private MetadataResults getMetadataResults( String groupId, String artifactId, String baseVersion,
-                                                long lastModified )
-    {
-        String key = getMetadataKey( groupId, artifactId, baseVersion );
-        Map metadataMap = this.metadataMap;
-        MetadataResults results = (MetadataResults) metadataMap.get( key );
-        if ( results == null )
-        {
-            results = new MetadataResults();
-            results.setArtifactId( artifactId );
-            results.setGroupId( groupId );
-            results.setVersion( baseVersion );
-            results.setLastModified( lastModified );
-
-            metadataMap.put( key, results );
-            reporting.getMetadata().add( results );
-        }
-        return results;
-    }
-
-    public void removeArtifact( Artifact artifact )
-    {
-        Map map = artifactMap;
-
-        String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
-                                     artifact.getType(), artifact.getClassifier() );
-        ArtifactResults results = (ArtifactResults) map.get( key );
-        if ( results != null )
-        {
-            for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
-            {
-                if ( results.equals( i.next() ) )
-                {
-                    i.remove();
-                }
-            }
-
-            numFailures -= results.getFailures().size();
-            numWarnings -= results.getWarnings().size();
-            numNotices -= results.getNotices().size();
-
-            map.remove( key );
-        }
-    }
-
-    public ArtifactRepository getRepository()
-    {
-        return repository;
-    }
-
-    public boolean isInProgress()
-    {
-        return inProgress;
-    }
-
-    public void setInProgress( boolean inProgress )
-    {
-        this.inProgress = inProgress;
-
-        if ( inProgress )
-        {
-            startTime = System.currentTimeMillis();
-        }
-    }
-
-    public void clear()
-    {
-        // clear the values rather than destroy the instance so that the "inProgress" indicator is in tact.
-        numWarnings = 0;
-        numNotices = 0;
-        numFailures = 0;
-
-        artifactMap.clear();
-        metadataMap.clear();
-        metadataWithProblems.clear();
-        filteredDatabases.clear();
-
-        reporting.getArtifacts().clear();
-        reporting.getMetadata().clear();
-
-        updateTimings();
-    }
-
-    public void setStartTime( long startTime )
-    {
-        this.startTime = startTime;
-    }
-
-    public long getStartTime()
-    {
-        return startTime;
-    }
-
-    public void updateTimings()
-    {
-        long startTime = getStartTime();
-        Date endTime = new Date();
-        if ( startTime > 0 )
-        {
-            getReporting().setExecutionTime( endTime.getTime() - startTime );
-        }
-        getReporting().setLastModified( endTime.getTime() );
-    }
-
-    public ReportGroup getReportGroup()
+    public int getNumFailures()
     {
-        return reportGroup;
+        int count = 0;
+        count += artifactDatabase.getNumFailures();
+        count += metadataDatabase.getNumFailures();
+        return count;
     }
 
-    public ReportingDatabase getFilteredDatabase( String filter )
+    /**
+     * <p>
+     * Get the number of notices in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of notices in the database.
+     */
+    public int getNumNotices()
     {
-        ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( filter );
-
-        if ( reportingDatabase == null )
-        {
-            reportingDatabase = new ReportingDatabase( reportGroup, repository );
-
-            Reporting reporting = reportingDatabase.getReporting();
-            reporting.setExecutionTime( this.reporting.getExecutionTime() );
-            reporting.setLastModified( this.reporting.getLastModified() );
-
-            for ( Iterator i = this.reporting.getArtifacts().iterator(); i.hasNext(); )
-            {
-                ArtifactResults results = (ArtifactResults) i.next();
-                ArtifactResults targetResults = null;
-                for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addFailure( result );
-                        reportingDatabase.numFailures++;
-                    }
-                }
-                for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addWarning( result );
-                        reportingDatabase.numWarnings++;
-                    }
-                }
-                for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addNotice( result );
-                        reportingDatabase.numNotices++;
-                    }
-                }
-            }
-            for ( Iterator i = this.reporting.getMetadata().iterator(); i.hasNext(); )
-            {
-                MetadataResults results = (MetadataResults) i.next();
-                MetadataResults targetResults = null;
-                for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addFailure( result );
-                        reportingDatabase.numFailures++;
-                    }
-                }
-                for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addWarning( result );
-                        reportingDatabase.numWarnings++;
-                    }
-                }
-                for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addNotice( result );
-                        reportingDatabase.numNotices++;
-                    }
-                }
-            }
-
-            filteredDatabases.put( filter, reportingDatabase );
-        }
-
-        return reportingDatabase;
+        int count = 0;
+        count += artifactDatabase.getNumNotices();
+        count += metadataDatabase.getNumNotices();
+        return count;
     }
 
-    private static MetadataResults createMetadataResults( ReportingDatabase reportingDatabase, MetadataResults results )
+    /**
+     * <p>
+     * Get the number of warnings in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of warnings in the database.
+     */
+    public int getNumWarnings()
     {
-        MetadataResults targetResults = reportingDatabase.getMetadataResults( results.getGroupId(),
-                                                                              results.getArtifactId(),
-                                                                              results.getVersion(),
-                                                                              results.getLastModified() );
-        reportingDatabase.metadataWithProblems.add( targetResults );
-        return targetResults;
+        int count = 0;
+        count += artifactDatabase.getNumWarnings();
+        count += metadataDatabase.getNumWarnings();
+        return count;
     }
 
-    private static ArtifactResults createArtifactResults( ReportingDatabase reportingDatabase, ArtifactResults results )
+    public ArtifactResultsDatabase getArtifactDatabase()
     {
-        return reportingDatabase.getArtifactResults( results.getGroupId(), results.getArtifactId(),
-                                                     results.getVersion(), results.getType(), results.getClassifier() );
+        return artifactDatabase;
     }
 
-    public int getNumNotices()
+    public MetadataResultsDatabase getMetadataDatabase()
     {
-        return numNotices;
+        return metadataDatabase;
     }
 }
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java
deleted file mode 100644 (file)
index 8bc4b9d..0000000
+++ /dev/null
@@ -1,247 +0,0 @@
-package org.apache.maven.archiva.reporting.executor;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStore;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.InvalidArtifactRTException;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-import org.apache.maven.model.Model;
-import org.apache.maven.project.MavenProject;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.apache.maven.project.ProjectBuildingException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Report executor implementation.
- *
- * @todo should the report set be limitable by configuration?
- * @plexus.component
- */
-public class DefaultReportExecutor
-    extends AbstractLogEnabled
-    implements ReportExecutor
-{
-    /**
-     * @plexus.requirement
-     */
-    private MavenProjectBuilder projectBuilder;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportingStore reportingStore;
-
-    /**
-     * @plexus.requirement
-     */
-    private ArtifactFactory artifactFactory;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
-     */
-    private Map artifactDiscoverers;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
-     */
-    private Map metadataDiscoverers;
-
-    private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
-    public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
-        for ( Iterator i = metadata.iterator(); i.hasNext(); )
-        {
-            RepositoryMetadata repositoryMetadata = (RepositoryMetadata) i.next();
-
-            File file =
-                new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
-            reporter.cleanMetadata( repositoryMetadata, file.lastModified() );
-
-            reportGroup.processMetadata( repositoryMetadata, repository, reporter );
-        }
-
-        reportingStore.storeReports( reporter, repository );
-    }
-
-    public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-
-            Model model = null;
-            try
-            {
-                Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
-                                                                              artifact.getArtifactId(),
-                                                                              artifact.getVersion() );
-                MavenProject project =
-                    projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
-
-                model = project.getModel();
-            }
-            catch ( InvalidArtifactRTException e )
-            {
-                reporter.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
-            }
-            catch ( ProjectBuildingException e )
-            {
-                reporter.addWarning( artifact, null, null, "Error reading project model: " + e );
-            }
-
-            reporter.removeArtifact( artifact );
-
-            reportGroup.processArtifact( artifact, model, reporter );
-        }
-
-        reportingStore.storeReports( reporter, repository );
-    }
-
-    public ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException
-    {
-        getLogger().debug(
-            "Reading previous report database " + reportGroup.getName() + " from repository " + repository.getId() );
-        return reportingStore.getReportsFromStore( repository, reportGroup );
-    }
-
-    public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
-                            ArtifactFilter filter )
-        throws DiscovererException, ReportingStoreException
-    {
-        // Flush (as in toilet, not store) the report database
-        ReportingDatabase database = getReportDatabase( repository, reportGroup );
-        database.clear();
-
-        // Discovery process
-        String layoutProperty = getRepositoryLayout( repository.getLayout() );
-        ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
-
-        // Save some memory by not tracking paths we won't use
-        // TODO: Plexus CDC should be able to inject this configuration
-        discoverer.setTrackOmittedPaths( false );
-
-        List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
-        if ( !artifacts.isEmpty() )
-        {
-            getLogger().info( "Discovered " + artifacts.size() + " artifacts" );
-
-            // Work through these in batches, then flush the project cache.
-            for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
-            {
-                int end = j + ARTIFACT_BUFFER_SIZE;
-                List currentArtifacts = artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
-                // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
-                // run the reports.
-                runArtifactReports( reportGroup, currentArtifacts, repository );
-
-                // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
-                // around that. TODO: remove when it is configurable
-                flushProjectBuilderCacheHack();
-            }
-        }
-
-        MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
-        List metadata =
-            metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
-
-        if ( !metadata.isEmpty() )
-        {
-            getLogger().info( "Discovered " + metadata.size() + " metadata files" );
-
-            // run the reports
-            runMetadataReports( reportGroup, metadata, repository );
-        }
-    }
-
-    private String getRepositoryLayout( ArtifactRepositoryLayout layout )
-    {
-        // gross limitation that there is no reverse lookup of the hint for the layout.
-        if ( layout.getClass().equals( DefaultRepositoryLayout.class ) )
-        {
-            return "default";
-        }
-        else if ( layout.getClass().equals( LegacyRepositoryLayout.class ) )
-        {
-            return "legacy";
-        }
-        else
-        {
-            throw new IllegalArgumentException( "Unknown layout: " + layout );
-        }
-    }
-
-    private void flushProjectBuilderCacheHack()
-    {
-        try
-        {
-            if ( projectBuilder != null )
-            {
-                java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
-                f.setAccessible( true );
-                Map cache = (Map) f.get( projectBuilder );
-                cache.clear();
-
-                f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
-                f.setAccessible( true );
-                cache = (Map) f.get( projectBuilder );
-                cache.clear();
-            }
-        }
-        catch ( NoSuchFieldException e )
-        {
-            throw new RuntimeException( e );
-        }
-        catch ( IllegalAccessException e )
-        {
-            throw new RuntimeException( e );
-        }
-    }
-}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java
deleted file mode 100644 (file)
index d6f7b5d..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-package org.apache.maven.archiva.reporting.executor;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.List;
-
-/**
- * Executes a report or report group.
- */
-public interface ReportExecutor
-{
-    /**
-     * Plexus component role name.
-     */
-    String ROLE = ReportExecutor.class.getName();
-
-    /**
-     * Run reports on a set of metadata.
-     *
-     * @param reportGroup the report set to run
-     * @param metadata    the RepositoryMetadata objects to report on
-     * @param repository  the repository that they come from
-     * @throws org.apache.maven.archiva.reporting.store.ReportingStoreException
-     *          if there is a problem reading/writing the report database
-     */
-    public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
-        throws ReportingStoreException;
-
-    /**
-     * Run reports on a set of artifacts.
-     *
-     * @param reportGroup the report set to run
-     * @param artifacts   the Artifact objects to report on
-     * @param repository  the repository that they come from
-     * @throws ReportingStoreException if there is a problem reading/writing the report database
-     */
-    public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
-        throws ReportingStoreException;
-
-    /**
-     * Get the report database in use for a given repository.
-     *
-     * @param repository  the repository
-     * @param reportGroup the report set to run
-     * @return the report database
-     * @throws ReportingStoreException if there is a problem reading the report database
-     */
-    ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException;
-
-    /**
-     * Run the artifact and metadata reports for the repository. The artifacts and metadata will be discovered.
-     *
-     * @param repository          the repository to run from
-     * @param blacklistedPatterns the patterns to exclude during discovery
-     * @param filter              the filter to use during discovery to get a consistent list of artifacts
-     * @param reportGroup         the report set to run
-     * @throws ReportingStoreException if there is a problem reading/writing the report database
-     * @throws org.apache.maven.archiva.discoverer.DiscovererException
-     *                                 if there is a problem finding the artifacts and metadata to report on
-     */
-    public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
-                            ArtifactFilter filter )
-        throws DiscovererException, ReportingStoreException;
-}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java
deleted file mode 100644 (file)
index f1158ad..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-package org.apache.maven.archiva.reporting.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Implementation of a reporting filter. Artifacts already in the database are ignored.
- */
-public class ReportingMetadataFilter
-    implements MetadataFilter
-{
-    private final ReportingDatabase reporter;
-
-    public ReportingMetadataFilter( ReportingDatabase reporter )
-    {
-        this.reporter = reporter;
-    }
-
-    public boolean include( RepositoryMetadata metadata, long timestamp )
-    {
-        return !reporter.isMetadataUpToDate( metadata, timestamp );
-    }
-}
index 2051f7d4b1a1c68d513cdf93d9e0b6d9d626190f..4e5acdddbba178a3f1b8dfebaf62c0de119184a3 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.group;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
 import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
 import org.apache.maven.artifact.Artifact;
@@ -46,7 +45,7 @@ public abstract class AbstractReportGroup
      */
     private Map metadataReports;
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase )
+    public void processArtifact( Artifact artifact, Model model )
     {
         for ( Iterator i = artifactReports.entrySet().iterator(); i.hasNext(); )
         {
@@ -56,13 +55,12 @@ public abstract class AbstractReportGroup
             {
                 ArtifactReportProcessor report = (ArtifactReportProcessor) entry.getValue();
 
-                report.processArtifact( artifact, model, reportingDatabase );
+                report.processArtifact( artifact, model );
             }
         }
     }
 
-    public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
-                                 ReportingDatabase reportingDatabase )
+    public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository )
     {
         for ( Iterator i = metadataReports.entrySet().iterator(); i.hasNext(); )
         {
@@ -72,7 +70,7 @@ public abstract class AbstractReportGroup
             {
                 MetadataReportProcessor report = (MetadataReportProcessor) entry.getValue();
 
-                report.processMetadata( repositoryMetadata, repository, reportingDatabase );
+                report.processMetadata( repositoryMetadata, repository );
             }
         }
     }
index df26343ac34dc7d94f0bfbc38bf471abee4a428d..58b8e2f635ada9d0a35b8e834291d78cc33e3df6 100644 (file)
@@ -25,7 +25,8 @@ import java.util.Map;
 /**
  * The default report set, for repository health.
  *
- * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" role-hint="health"
+ * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" 
+ *      role-hint="health"
  * @todo could these report groups be assembled dynamically by configuration rather than as explicit components? eg, reportGroup.addReport( ARP ), reportGroup.addReport( MRP )
  */
 public class DefaultReportGroup
@@ -62,9 +63,4 @@ public class DefaultReportGroup
     {
         return "Repository Health";
     }
-
-    public String getFilename()
-    {
-        return "health-report.xml";
-    }
 }
index 30fe412e8eac45f18512a6febf9c9336eb00edd6..fa34b223335dac177291bdd80cb3c4338c5f62c8 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.group;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
@@ -43,19 +42,16 @@ public interface ReportGroup
      *
      * @param artifact          the artifact to process
      * @param model             the POM associated with the artifact to process
-     * @param reportingDatabase the report database to store results in
      */
-    void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase );
+    void processArtifact( Artifact artifact, Model model );
 
     /**
      * Run any metadata related reports in the report set.
      *
      * @param repositoryMetadata the metadata to process
      * @param repository         the repository the metadata is located in
-     * @param reportingDatabase  the report database to store results in
      */
-    void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
-                          ReportingDatabase reportingDatabase );
+    void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository );
 
     /**
      * Whether a report with the given role hint is included in this report set.
@@ -79,11 +75,4 @@ public interface ReportGroup
      * @return the report name
      */
     String getName();
-
-    /**
-     * Get the filename of the reports within the repository's reports directory.
-     *
-     * @return the filename
-     */
-    String getFilename();
 }
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java
new file mode 100644 (file)
index 0000000..3494479
--- /dev/null
@@ -0,0 +1,158 @@
+package org.apache.maven.archiva.reporting.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.Serializable;
+
+/**
+ * ArtifactResultsKey - used by jpox for application identity for the {@link ArtifactResults} object and table. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArtifactResultsKey
+    implements Serializable
+{
+    public String groupId = "";
+
+    public String artifactId = "";
+
+    public String version = "";
+
+    public String type = "";
+
+    public String classifier = "";
+
+    public ArtifactResultsKey()
+    {
+        /* do nothing */
+    }
+
+    public ArtifactResultsKey( String key )
+    {
+        String parts[] = StringUtils.splitPreserveAllTokens( key, ':' );
+        groupId = parts[0];
+        artifactId = parts[1];
+        version = parts[2];
+        type = parts[3];
+        classifier = parts[4];
+    }
+
+    public String toString()
+    {
+        return StringUtils.join( new String[] { groupId, artifactId, version, type, classifier }, ':' );
+    }
+
+    public int hashCode()
+    {
+        final int PRIME = 31;
+        int result = 1;
+        result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() );
+        result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() );
+        result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
+        result = PRIME * result + ( ( type == null ) ? 0 : type.hashCode() );
+        result = PRIME * result + ( ( classifier == null ) ? 0 : classifier.hashCode() );
+        return result;
+    }
+
+    public boolean equals( Object obj )
+    {
+        if ( this == obj )
+        {
+            return true;
+        }
+
+        if ( obj == null )
+        {
+            return false;
+        }
+
+        if ( getClass() != obj.getClass() )
+        {
+            return false;
+        }
+
+        final ArtifactResultsKey other = (ArtifactResultsKey) obj;
+
+        if ( groupId == null )
+        {
+            if ( other.groupId != null )
+            {
+                return false;
+            }
+        }
+        else if ( !groupId.equals( other.groupId ) )
+        {
+            return false;
+        }
+
+        if ( artifactId == null )
+        {
+            if ( other.artifactId != null )
+            {
+                return false;
+            }
+        }
+        else if ( !artifactId.equals( other.artifactId ) )
+        {
+            return false;
+        }
+
+        if ( version == null )
+        {
+            if ( other.version != null )
+            {
+                return false;
+            }
+        }
+        else if ( !version.equals( other.version ) )
+        {
+            return false;
+        }
+
+        if ( type == null )
+        {
+            if ( other.type != null )
+            {
+                return false;
+            }
+        }
+        else if ( !type.equals( other.type ) )
+        {
+            return false;
+        }
+
+        if ( classifier == null )
+        {
+            if ( other.classifier != null )
+            {
+                return false;
+            }
+        }
+        else if ( !classifier.equals( other.classifier ) )
+        {
+            return false;
+        }
+
+        return true;
+    }
+}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java
new file mode 100644 (file)
index 0000000..aeaff3b
--- /dev/null
@@ -0,0 +1,126 @@
+package org.apache.maven.archiva.reporting.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.Serializable;
+
/**
 * MetadataResultsKey - used by jpox for application identity for the {@link MetadataResults} object and table.
 *
 * <p>
 * The key is the colon-joined tuple {@code groupId:artifactId:version}.
 * Instances are mutable (public fields) because jpox populates them reflectively.
 * </p>
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
public class MetadataResultsKey
    implements Serializable
{
    public String groupId = "";

    public String artifactId = "";

    public String version = "";

    public MetadataResultsKey()
    {
        /* do nothing */
    }

    /**
     * Parse a key of the form {@code groupId:artifactId:version}.
     *
     * @param key the colon-delimited key; must contain at least 2 colons
     *            (empty segments are preserved)
     */
    public MetadataResultsKey( String key )
    {
        // limit of -1 preserves trailing empty tokens, matching
        // StringUtils.splitPreserveAllTokens( key, ':' )
        String parts[] = key.split( ":", -1 );
        groupId = parts[0];
        artifactId = parts[1];
        version = parts[2];
    }

    /**
     * @return the colon-joined key; null fields are rendered as "" (join semantics)
     */
    public String toString()
    {
        return nullToEmpty( groupId ) + ":" + nullToEmpty( artifactId ) + ":" + nullToEmpty( version );
    }

    public int hashCode()
    {
        final int PRIME = 31;
        int result = 1;
        result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() );
        result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() );
        result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
        return result;
    }

    public boolean equals( Object obj )
    {
        if ( this == obj )
        {
            return true;
        }

        if ( ( obj == null ) || ( getClass() != obj.getClass() ) )
        {
            return false;
        }

        // BUGFIX: was cast to ArtifactResultsKey (copy-paste error), which threw
        // ClassCastException on every comparison of two MetadataResultsKey instances
        // because the getClass() guard above already ensured obj is a MetadataResultsKey.
        final MetadataResultsKey other = (MetadataResultsKey) obj;

        return same( groupId, other.groupId ) //
            && same( artifactId, other.artifactId ) //
            && same( version, other.version );
    }

    /** Null-safe string equality (both-null counts as equal). */
    private static boolean same( String a, String b )
    {
        return ( a == null ) ? ( b == null ) : a.equals( b );
    }

    /** Render null as the empty string, as StringUtils.join did for null elements. */
    private static String nullToEmpty( String s )
    {
        return ( s == null ) ? "" : s;
    }
}
index 79b7d53f0d2522c30b06faae419e51fedd4ca533..52e63c5e71d2d5630d43ea76008285e11551d18b 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.model.Model;
 
@@ -31,5 +30,5 @@ public interface ArtifactReportProcessor
 {
     String ROLE = ArtifactReportProcessor.class.getName();
 
-    void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter );
+    void processArtifact( Artifact artifact, Model model );
 }
index 7660dfcb0a60188b8738b8fc99594a2e502fb0e6..04a31af7e0b583ced16c940cab8724e427397696 100644 (file)
@@ -22,7 +22,7 @@ package org.apache.maven.archiva.reporting.processor;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.archiva.layer.RepositoryQueryLayer;
 import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -61,6 +61,11 @@ public class BadMetadataReportProcessor
      */
     private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
 
+    /**
+     * @plexus.requirement
+     */
+    private MetadataResultsDatabase database;
+
     private static final String ROLE_HINT = "bad-metadata";
 
     /**
@@ -70,18 +75,17 @@ public class BadMetadataReportProcessor
      * @param repository the repository where the metadata was encountered
      * @param reporter   the ReportingDatabase to receive processing results
      */
-    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                 ReportingDatabase reporter )
+    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
     {
         if ( metadata.storedInGroupDirectory() )
         {
             try
             {
-                checkPluginMetadata( metadata, repository, reporter );
+                checkPluginMetadata( metadata, repository );
             }
             catch ( IOException e )
             {
-                addWarning( reporter, metadata, null, "Error getting plugin artifact directories versions: " + e );
+                addWarning( metadata, null, "Error getting plugin artifact directories versions: " + e );
             }
         }
         else
@@ -98,36 +102,34 @@ public class BadMetadataReportProcessor
             }
             if ( !found )
             {
-                addFailure( reporter, metadata, "missing-last-updated",
-                            "Missing lastUpdated element inside the metadata." );
+                addFailure( metadata, "missing-last-updated", "Missing lastUpdated element inside the metadata." );
             }
 
             if ( metadata.storedInArtifactVersionDirectory() )
             {
-                checkSnapshotMetadata( metadata, repository, reporter );
+                checkSnapshotMetadata( metadata, repository );
             }
             else
             {
-                checkMetadataVersions( metadata, repository, reporter );
+                checkMetadataVersions( metadata, repository );
 
                 try
                 {
-                    checkRepositoryVersions( metadata, repository, reporter );
+                    checkRepositoryVersions( metadata, repository );
                 }
                 catch ( IOException e )
                 {
                     String reason = "Error getting plugin artifact directories versions: " + e;
-                    addWarning( reporter, metadata, null, reason );
+                    addWarning( metadata, null, reason );
                 }
             }
         }
     }
 
-    private static void addWarning( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
+    private void addWarning( RepositoryMetadata metadata, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addWarning( metadata, ROLE_HINT, problem, reason );
+        database.addWarning( metadata, ROLE_HINT, problem, reason );
     }
 
     /**
@@ -137,12 +139,11 @@ public class BadMetadataReportProcessor
      * @param repository the repository where the metadata was encountered
      * @param reporter   the ReportingDatabase to receive processing results
      */
-    private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                      ReportingDatabase reporter )
+    private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
         throws IOException
     {
-        File metadataDir =
-            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+        File metadataDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) )
+            .getParentFile();
         List pluginDirs = getArtifactIdFiles( metadataDir );
 
         Map prefixes = new HashMap();
@@ -153,22 +154,22 @@ public class BadMetadataReportProcessor
             String artifactId = plugin.getArtifactId();
             if ( artifactId == null || artifactId.length() == 0 )
             {
-                addFailure( reporter, metadata, "missing-artifact-id:" + plugin.getPrefix(),
+                addFailure( metadata, "missing-artifact-id:" + plugin.getPrefix(),
                             "Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
             }
 
             String prefix = plugin.getPrefix();
             if ( prefix == null || prefix.length() == 0 )
             {
-                addFailure( reporter, metadata, "missing-plugin-prefix:" + artifactId,
+                addFailure( metadata, "missing-plugin-prefix:" + artifactId,
                             "Missing or empty plugin prefix for artifactId " + artifactId + "." );
             }
             else
             {
                 if ( prefixes.containsKey( prefix ) )
                 {
-                    addFailure( reporter, metadata, "duplicate-plugin-prefix:" + prefix,
-                                "Duplicate plugin prefix found: " + prefix + "." );
+                    addFailure( metadata, "duplicate-plugin-prefix:" + prefix, "Duplicate plugin prefix found: "
+                        + prefix + "." );
                 }
                 else
                 {
@@ -181,8 +182,8 @@ public class BadMetadataReportProcessor
                 File pluginDir = new File( metadataDir, artifactId );
                 if ( !pluginDirs.contains( pluginDir ) )
                 {
-                    addFailure( reporter, metadata, "missing-plugin-from-repository:" + artifactId,
-                                "Metadata plugin " + artifactId + " not found in the repository" );
+                    addFailure( metadata, "missing-plugin-from-repository:" + artifactId, "Metadata plugin "
+                        + artifactId + " not found in the repository" );
                 }
                 else
                 {
@@ -196,8 +197,8 @@ public class BadMetadataReportProcessor
             for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
             {
                 File plugin = (File) plugins.next();
-                addFailure( reporter, metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " +
-                    plugin.getName() + " is present in the repository but " + "missing in the metadata." );
+                addFailure( metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " + plugin.getName()
+                    + " is present in the repository but " + "missing in the metadata." );
             }
         }
     }
@@ -209,27 +210,26 @@ public class BadMetadataReportProcessor
      * @param repository the repository where the metadata was encountered
      * @param reporter   the ReportingDatabase to receive processing results
      */
-    private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                        ReportingDatabase reporter )
+    private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
     {
-        RepositoryQueryLayer repositoryQueryLayer =
-            repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+        RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
 
         Versioning versioning = metadata.getMetadata().getVersioning();
         if ( versioning != null )
         {
             Snapshot snapshot = versioning.getSnapshot();
 
-            String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION,
-                                                  snapshot.getTimestamp() + "-" + snapshot.getBuildNumber() );
-            Artifact artifact =
-                artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+            String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION, snapshot
+                .getTimestamp()
+                + "-" + snapshot.getBuildNumber() );
+            Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(),
+                                                                       version );
             artifact.isSnapshot(); // trigger baseVersion correction
 
             if ( !repositoryQueryLayer.containsArtifact( artifact ) )
             {
-                addFailure( reporter, metadata, "missing-snapshot-artifact-from-repository:" + version,
-                            "Snapshot artifact " + version + " does not exist." );
+                addFailure( metadata, "missing-snapshot-artifact-from-repository:" + version, "Snapshot artifact "
+                    + version + " does not exist." );
             }
         }
     }
@@ -241,11 +241,9 @@ public class BadMetadataReportProcessor
      * @param repository the repository where the metadata was encountered
      * @param reporter   the ReportingDatabase to receive processing results
      */
-    private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
-                                        ReportingDatabase reporter )
+    private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository )
     {
-        RepositoryQueryLayer repositoryQueryLayer =
-            repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+        RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
 
         Versioning versioning = metadata.getMetadata().getVersioning();
         if ( versioning != null )
@@ -254,13 +252,13 @@ public class BadMetadataReportProcessor
             {
                 String version = (String) versions.next();
 
-                Artifact artifact =
-                    artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+                Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata
+                    .getArtifactId(), version );
 
                 if ( !repositoryQueryLayer.containsArtifact( artifact ) )
                 {
-                    addFailure( reporter, metadata, "missing-artifact-from-repository:" + version, "Artifact version " +
-                        version + " is present in metadata but " + "missing in the repository." );
+                    addFailure( metadata, "missing-artifact-from-repository:" + version, "Artifact version " + version
+                        + " is present in metadata but " + "missing in the repository." );
                 }
             }
         }
@@ -275,14 +273,13 @@ public class BadMetadataReportProcessor
      * @param reporter   the ReportingDatabase to receive processing results
      * @throws java.io.IOException if there is a problem reading from the file system
      */
-    private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
-                                          ReportingDatabase reporter )
+    private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository )
         throws IOException
     {
         Versioning versioning = metadata.getMetadata().getVersioning();
         List metadataVersions = versioning != null ? versioning.getVersions() : Collections.EMPTY_LIST;
-        File versionsDir =
-            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+        File versionsDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) )
+            .getParentFile();
 
         // TODO: I don't know how this condition can happen, but it was seen on the main repository.
         // Avoid hard failure
@@ -295,14 +292,14 @@ public class BadMetadataReportProcessor
                 String version = path.getParentFile().getName();
                 if ( !metadataVersions.contains( version ) )
                 {
-                    addFailure( reporter, metadata, "missing-artifact-from-metadata:" + version, "Artifact version " +
-                        version + " found in the repository but " + "missing in the metadata." );
+                    addFailure( metadata, "missing-artifact-from-metadata:" + version, "Artifact version " + version
+                        + " found in the repository but " + "missing in the metadata." );
                 }
             }
         }
         else
         {
-            addFailure( reporter, metadata, null, "Metadata's directory did not exist: " + versionsDir );
+            addFailure( metadata, null, "Metadata's directory did not exist: " + versionsDir );
         }
     }
 
@@ -339,10 +336,9 @@ public class BadMetadataReportProcessor
         return artifactIdFiles;
     }
 
-    private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
+    private void addFailure( RepositoryMetadata metadata, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+        database.addFailure( metadata, ROLE_HINT, problem, reason );
     }
 }
index 36b60cc2ee96c91a89265161f36c109fb3a127ae..8bd5e141f331ca545f48b7c5c632682e8b4079eb 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.model.Model;
@@ -48,10 +48,15 @@ public class ChecksumArtifactReportProcessor
      * @plexus.requirement role-hint="md5"
      */
     private Digester md5Digester;
+    
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
 
     private static final String ROLE_HINT = "checksum";
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
@@ -68,11 +73,11 @@ public class ChecksumArtifactReportProcessor
 
         // TODO: make md5 configurable
 //        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
-        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
+        verifyChecksum( repository, path + ".sha1", file, sha1Digester, artifact );
     }
 
     private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
-                                 ReportingDatabase reporter, Artifact artifact )
+                                 Artifact artifact )
     {
         File checksumFile = new File( repository.getBasedir(), path );
         if ( checksumFile.exists() )
@@ -83,23 +88,23 @@ public class ChecksumArtifactReportProcessor
             }
             catch ( DigesterException e )
             {
-                addFailure( reporter, artifact, "checksum-wrong", e.getMessage() );
+                addFailure( artifact, "checksum-wrong", e.getMessage() );
             }
             catch ( IOException e )
             {
-                addFailure( reporter, artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
+                addFailure( artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
             }
         }
         else
         {
-            addFailure( reporter, artifact, "checksum-missing",
+            addFailure( artifact, "checksum-missing",
                         digester.getAlgorithm() + " checksum file does not exist." );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 }
index 1f0e860a797b27fa1cf35eeba3fe0f7bd544eb82..84313dc52fa936f2df8da6189bedc1dfccad9749 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
 import org.codehaus.plexus.digest.Digester;
@@ -48,32 +48,36 @@ public class ChecksumMetadataReportProcessor
      */
     private Digester md5Digester;
 
+    /**
+     * @plexus.requirement
+     */
+    private MetadataResultsDatabase database;
+
     private static final String ROLE_HINT = "checksum-metadata";
 
     /**
      * Validate the checksums of the metadata. Get the metadata file from the
      * repository then validate the checksum.
      */
-    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                 ReportingDatabase reporter )
+    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
     {
         if ( !"file".equals( repository.getProtocol() ) )
         {
             // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+            throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+                + "'. Only file based repositories are supported" );
         }
 
         //check if checksum files exist
         String path = repository.pathOfRemoteRepositoryMetadata( metadata );
         File file = new File( repository.getBasedir(), path );
 
-        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
-        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );
+        verifyChecksum( repository, path + ".md5", file, md5Digester, metadata );
+        verifyChecksum( repository, path + ".sha1", file, sha1Digester, metadata );
     }
 
     private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
-                                 ReportingDatabase reporter, RepositoryMetadata metadata )
+                                 RepositoryMetadata metadata )
     {
         File checksumFile = new File( repository.getBasedir(), path );
         if ( checksumFile.exists() )
@@ -84,25 +88,23 @@ public class ChecksumMetadataReportProcessor
             }
             catch ( DigesterException e )
             {
-                addFailure( reporter, metadata, "checksum-wrong", e.getMessage() );
+                addFailure( metadata, "checksum-wrong", e.getMessage() );
             }
             catch ( IOException e )
             {
-                addFailure( reporter, metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
+                addFailure( metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
             }
         }
         else
         {
-            addFailure( reporter, metadata, "checksum-missing",
-                        digester.getAlgorithm() + " checksum file does not exist." );
+            addFailure( metadata, "checksum-missing", digester.getAlgorithm() + " checksum file does not exist." );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
+    private void addFailure( RepositoryMetadata metadata, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+        database.addFailure( metadata, ROLE_HINT, problem, reason );
     }
 
 }
index 2d5bf80b4d305b7656f6705ae84d54bcb135494a..049767c2dfdfbff9a5169d01137ceea081422200 100644 (file)
@@ -21,7 +21,7 @@ package org.apache.maven.archiva.reporting.processor;
 
 import org.apache.maven.archiva.layer.RepositoryQueryLayer;
 import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
@@ -49,34 +49,39 @@ public class DependencyArtifactReportProcessor
      */
     private RepositoryQueryLayerFactory layerFactory;
 
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
     private static final String POM = "pom";
 
     private static final String ROLE_HINT = "dependency";
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( artifact.getRepository() );
         if ( !queryLayer.containsArtifact( artifact ) )
         {
             // TODO: is this even possible?
-            addFailure( reporter, artifact, "missing-artifact", "Artifact does not exist in the repository" );
+            addFailure( artifact, "missing-artifact", "Artifact does not exist in the repository" );
         }
 
         if ( model != null && POM.equals( artifact.getType() ) )
         {
             List dependencies = model.getDependencies();
-            processDependencies( dependencies, reporter, queryLayer, artifact );
+            processDependencies( dependencies, queryLayer, artifact );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 
-    private void processDependencies( List dependencies, ReportingDatabase reporter,
-                                      RepositoryQueryLayer repositoryQueryLayer, Artifact sourceArtifact )
+    private void processDependencies( List dependencies, RepositoryQueryLayer repositoryQueryLayer,
+                                      Artifact sourceArtifact )
     {
         if ( dependencies.size() > 0 )
         {
@@ -100,19 +105,19 @@ public class DependencyArtifactReportProcessor
 
                     if ( !repositoryQueryLayer.containsArtifact( artifact ) )
                     {
-                        String reason = MessageFormat.format(
-                            "Artifact''s dependency {0} does not exist in the repository",
-                            new String[]{getDependencyString( dependency )} );
-                        addFailure( reporter, sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ),
-                                    reason );
+                        String reason = MessageFormat
+                            .format( "Artifact''s dependency {0} does not exist in the repository",
+                                     new String[] { getDependencyString( dependency ) } );
+                        addFailure( sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ), reason );
                     }
                 }
                 catch ( InvalidVersionSpecificationException e )
                 {
                     String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}",
-                                                          new String[]{getDependencyString( dependency ),
-                                                              dependency.getVersion()} );
-                    addFailure( reporter, sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
+                                                          new String[] {
+                                                              getDependencyString( dependency ),
+                                                              dependency.getVersion() } );
+                    addFailure( sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
                 }
             }
         }
@@ -156,7 +161,7 @@ public class DependencyArtifactReportProcessor
         }
 
         return artifactFactory.createDependencyArtifact( dependency.getGroupId(), dependency.getArtifactId(), spec,
-                                                         dependency.getType(), dependency.getClassifier(),
-                                                         dependency.getScope() );
+                                                         dependency.getType(), dependency.getClassifier(), dependency
+                                                             .getScope() );
     }
 }
index 4c15eab455aecefcaaed7b8c8e7f8aee76821718..5dff8c50ef2de3cea7df8d5251090976e9a2723f 100644 (file)
@@ -27,7 +27,7 @@ import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
 import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
 import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
 import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.model.Model;
@@ -62,9 +62,14 @@ public class DuplicateArtifactFileReportProcessor
      */
     private String indexDirectory;
 
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
     private static final String ROLE_HINT = "duplicate";
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
         if ( artifact.getFile() != null )
@@ -82,16 +87,16 @@ public class DuplicateArtifactFileReportProcessor
             }
             catch ( DigesterException e )
             {
-                addWarning( reporter, artifact, null,
-                            "Unable to generate checksum for " + artifact.getFile() + ": " + e );
+                addWarning( artifact, null, "Unable to generate checksum for " + artifact.getFile() + ": " + e );
             }
 
             if ( checksum != null )
             {
                 try
                 {
-                    List results = index.search( new LuceneQuery(
-                        new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
+                    List results = index
+                        .search( new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum
+                            .toLowerCase() ) ) ) );
 
                     if ( !results.isEmpty() )
                     {
@@ -106,8 +111,7 @@ public class DuplicateArtifactFileReportProcessor
                                 String groupId = artifact.getGroupId();
                                 if ( groupId.equals( result.getGroupId() ) )
                                 {
-                                    addFailure( reporter, artifact, "duplicate",
-                                                "Found duplicate for " + artifact.getId() );
+                                    addFailure( artifact, "duplicate", "Found duplicate for " + artifact.getId() );
                                 }
                             }
                         }
@@ -115,25 +119,25 @@ public class DuplicateArtifactFileReportProcessor
                 }
                 catch ( RepositoryIndexSearchException e )
                 {
-                    addWarning( reporter, artifact, null, "Failed to search in index" + e );
+                    addWarning( artifact, null, "Failed to search in index" + e );
                 }
             }
         }
         else
         {
-            addWarning( reporter, artifact, null, "Artifact file is null" );
+            addWarning( artifact, null, "Artifact file is null" );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 
-    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addWarning( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addWarning( artifact, ROLE_HINT, problem, reason );
+        database.addWarning( artifact, ROLE_HINT, problem, reason );
     }
 }
index 1d4a6a64e681414b54eb8d98da45b21ae8790578..05977be8624b0872fa409ad2a93fee099fd4ab08 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.model.Model;
@@ -43,19 +43,24 @@ public class InvalidPomArtifactReportProcessor
 {
     private static final String ROLE_HINT = "invalid-pom";
 
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
     /**
      * @param artifact The pom xml file to be validated, passed as an artifact object.
      * @param reporter The artifact reporter object.
      */
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
         if ( !"file".equals( repository.getProtocol() ) )
         {
             // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+            throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+                + "'. Only file based repositories are supported" );
         }
 
         if ( "pom".equals( artifact.getType().toLowerCase() ) )
@@ -64,7 +69,7 @@ public class InvalidPomArtifactReportProcessor
 
             if ( !f.exists() )
             {
-                addFailure( reporter, artifact, "pom-missing", "POM not found." );
+                addFailure( artifact, "pom-missing", "POM not found." );
             }
             else
             {
@@ -79,13 +84,12 @@ public class InvalidPomArtifactReportProcessor
                 }
                 catch ( XmlPullParserException e )
                 {
-                    addFailure( reporter, artifact, "pom-parse-exception",
+                    addFailure( artifact, "pom-parse-exception",
                                 "The pom xml file is not well-formed. Error while parsing: " + e.getMessage() );
                 }
                 catch ( IOException e )
                 {
-                    addFailure( reporter, artifact, "pom-io-exception",
-                                "Error while reading the pom xml file: " + e.getMessage() );
+                    addFailure( artifact, "pom-io-exception", "Error while reading the pom xml file: " + e.getMessage() );
                 }
                 finally
                 {
@@ -95,9 +99,9 @@ public class InvalidPomArtifactReportProcessor
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 }
index cfd5944510f4544968c778a02441a59fd7ed6316..a8f5129b18cecc7bc5f694116e5755037100f454 100644 (file)
@@ -20,14 +20,13 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.handler.DefaultArtifactHandler;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.model.Model;
 import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.apache.maven.project.MavenProjectBuilder;
 import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
 
 import java.io.File;
@@ -57,13 +56,19 @@ public class LocationArtifactReportProcessor
     private ArtifactFactory artifactFactory;
 
     // TODO: share with other code with the same
-    private static final Set JAR_FILE_TYPES =
-        new HashSet( Arrays.asList( new String[]{"jar", "war", "par", "ejb", "ear", "rar", "sar"} ) );
+    private static final Set JAR_FILE_TYPES = new HashSet( Arrays.asList( new String[] {
+        "jar",
+        "war",
+        "par",
+        "ejb",
+        "ear",
+        "rar",
+        "sar" } ) );
 
     /**
      * @plexus.requirement
      */
-    private MavenProjectBuilder projectBuilder;
+    private ArtifactResultsDatabase database;
 
     private static final String POM = "pom";
 
@@ -77,15 +82,15 @@ public class LocationArtifactReportProcessor
      * location is valid based on the location specified in the pom. Check if the both the location
      * specified in the file system pom and in the pom included in the package is the same.
      */
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
         if ( !"file".equals( repository.getProtocol() ) )
         {
             // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+            throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+                + "'. Only file based repositories are supported" );
         }
 
         adjustDistributionArtifactHandler( artifact );
@@ -100,19 +105,16 @@ public class LocationArtifactReportProcessor
             {
                 //check if the artifact is located in its proper location based on the info
                 //specified in the model object/pom
-                Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(),
-                                                                                       model.getArtifactId(),
-                                                                                       model.getVersion(),
-                                                                                       artifact.getType(),
-                                                                                       artifact.getClassifier() );
+                Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(), model
+                    .getArtifactId(), model.getVersion(), artifact.getType(), artifact.getClassifier() );
 
                 adjustDistributionArtifactHandler( modelArtifact );
                 String modelPath = repository.pathOf( modelArtifact );
                 if ( !modelPath.equals( artifactPath ) )
                 {
-                    addFailure( reporter, artifact, "repository-pom-location",
-                                "The artifact is out of place. It does not match the specified location in the repository pom: " +
-                                    modelPath );
+                    addFailure( artifact, "repository-pom-location",
+                                "The artifact is out of place. It does not match the specified location in the repository pom: "
+                                    + modelPath );
                 }
             }
         }
@@ -126,7 +128,7 @@ public class LocationArtifactReportProcessor
             {
                 //unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
                 //check if the pom is included in the package
-                Model extractedModel = readArtifactModel( file, artifact, reporter );
+                Model extractedModel = readArtifactModel( file, artifact );
 
                 if ( extractedModel != null )
                 {
@@ -136,7 +138,7 @@ public class LocationArtifactReportProcessor
                                                                                       extractedModel.getPackaging() );
                     if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
                     {
-                        addFailure( reporter, artifact, "packaged-pom-location",
+                        addFailure( artifact, "packaged-pom-location",
                                     "The artifact is out of place. It does not match the specified location in the packaged pom." );
                     }
                 }
@@ -144,15 +146,14 @@ public class LocationArtifactReportProcessor
         }
         else
         {
-            addFailure( reporter, artifact, "missing-artifact",
-                        "The artifact file [" + file + "] cannot be found for metadata." );
+            addFailure( artifact, "missing-artifact", "The artifact file [" + file + "] cannot be found for metadata." );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 
     private static void adjustDistributionArtifactHandler( Artifact artifact )
@@ -168,7 +169,7 @@ public class LocationArtifactReportProcessor
         }
     }
 
-    private Model readArtifactModel( File file, Artifact artifact, ReportingDatabase reporter )
+    private Model readArtifactModel( File file, Artifact artifact )
     {
         Model model = null;
 
@@ -178,8 +179,8 @@ public class LocationArtifactReportProcessor
             jar = new JarFile( file );
 
             //Get the entry and its input stream.
-            JarEntry entry = jar.getJarEntry(
-                "META-INF/maven/" + artifact.getGroupId() + "/" + artifact.getArtifactId() + "/pom.xml" );
+            JarEntry entry = jar.getJarEntry( "META-INF/maven/" + artifact.getGroupId() + "/"
+                + artifact.getArtifactId() + "/pom.xml" );
 
             // If the entry is not null, extract it.
             if ( entry != null )
@@ -198,11 +199,11 @@ public class LocationArtifactReportProcessor
         }
         catch ( IOException e )
         {
-            addWarning( reporter, artifact, "Unable to read artifact to extract model: " + e );
+            addWarning( artifact, "Unable to read artifact to extract model: " + e );
         }
         catch ( XmlPullParserException e )
         {
-            addWarning( reporter, artifact, "Unable to parse extracted model: " + e );
+            addWarning( artifact, "Unable to parse extracted model: " + e );
         }
         finally
         {
@@ -222,10 +223,10 @@ public class LocationArtifactReportProcessor
         return model;
     }
 
-    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String reason )
+    private void addWarning( Artifact artifact, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addWarning( artifact, ROLE_HINT, null, reason );
+        database.addWarning( artifact, ROLE_HINT, null, reason );
     }
 
     private Model readModel( InputStream entryStream )
index dc6790abcf204902723d61bf17e3ce8d34c577f0..ad8465028b2017f04852bdaab8d11138e907d048 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
 
@@ -30,5 +29,5 @@ public interface MetadataReportProcessor
 {
     String ROLE = MetadataReportProcessor.class.getName();
 
-    void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ReportingDatabase reporter );
+    void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository );
 }
index 21bd7b4dc68085e7a047dcd2f0d8614eb5d6fc8a..b5f0817cbbb8beaeeb81193679640591afe93ffd 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.handler.DefaultArtifactHandler;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -44,8 +44,15 @@ public class OldArtifactReportProcessor
      * @plexus.configuration default-value="31536000"
      */
     private int maxAge;
+    
+    /**
+     * TODO: Must create an 'Old Artifact' database.
+     * TODO: Base this off of an artifact table query instead.
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
@@ -68,7 +75,7 @@ public class OldArtifactReportProcessor
             if ( System.currentTimeMillis() - file.lastModified() > maxAge * 1000 )
             {
                 // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-                reporter.addNotice( artifact, ROLE_HINT, "old-artifact",
+                database.addNotice( artifact, ROLE_HINT, "old-artifact",
                                     "The artifact is older than the maximum age of " + maxAge + " seconds." );
             }
         }
index 04720762dd2a5bed379a5077ad2a80ef1c518061..02c2cecb0169653c323646c2ca5d46244b3f2d4a 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.handler.DefaultArtifactHandler;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -62,15 +62,22 @@ public class OldSnapshotArtifactReportProcessor
      */
     private int maxSnapshots;
 
-    public void processArtifact( final Artifact artifact, Model model, ReportingDatabase reporter )
+    /**
+     * TODO: Must create an 'Old Artifact' database.
+     * TODO: Base this off of an artifact table query instead.
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
+    public void processArtifact( final Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
         if ( !"file".equals( repository.getProtocol() ) )
         {
             // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+            throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+                + "'. Only file based repositories are supported" );
         }
 
         adjustDistributionArtifactHandler( artifact );
@@ -95,13 +102,14 @@ public class OldSnapshotArtifactReportProcessor
                     catch ( ParseException e )
                     {
                         throw new IllegalStateException(
-                            "Shouldn't match timestamp pattern and not be able to parse it: " + m.group( 2 ) );
+                                                         "Shouldn't match timestamp pattern and not be able to parse it: "
+                                                             + m.group( 2 ) );
                     }
 
                     if ( System.currentTimeMillis() - timestamp > maxAge * 1000 )
                     {
-                        addNotice( reporter, artifact, "snapshot-expired-time",
-                                   "The artifact is older than the maximum age of " + maxAge + " seconds." );
+                        addNotice( artifact, "snapshot-expired-time", "The artifact is older than the maximum age of "
+                            + maxAge + " seconds." );
                     }
                     else if ( maxSnapshots > 0 )
                     {
@@ -109,12 +117,12 @@ public class OldSnapshotArtifactReportProcessor
                         {
                             public boolean accept( File file, String string )
                             {
-                                return string.startsWith( artifact.getArtifactId() + "-" ) &&
-                                    string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
+                                return string.startsWith( artifact.getArtifactId() + "-" )
+                                    && string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
                             }
                         } );
 
-                        List/*<Integer>*/ buildNumbers = new ArrayList();
+                        List/*<Integer>*/buildNumbers = new ArrayList();
                         Integer currentBuild = null;
                         for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
                         {
@@ -149,7 +157,7 @@ public class OldSnapshotArtifactReportProcessor
 
                         if ( buildNumbers.contains( currentBuild ) )
                         {
-                            addNotice( reporter, artifact, "snapshot-expired-count",
+                            addNotice( artifact, "snapshot-expired-count",
                                        "The artifact is older than the maximum number of retained snapshot builds." );
                         }
                     }
@@ -162,10 +170,10 @@ public class OldSnapshotArtifactReportProcessor
         }
     }
 
-    private static void addNotice( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addNotice( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addNotice( artifact, ROLE_HINT, problem, reason );
+        database.addNotice( artifact, ROLE_HINT, problem, reason );
     }
 
     private static void adjustDistributionArtifactHandler( Artifact artifact )
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java
deleted file mode 100644 (file)
index 0d3c813..0000000
+++ /dev/null
@@ -1,143 +0,0 @@
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Reader;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Writer;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Load and store the reports. No synchronization is used, but it is unnecessary as the old object
- * can continue to be used.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
- * @todo support other implementations than XML file
- * @plexus.component
- */
-public class DefaultReportingStore
-    extends AbstractLogEnabled
-    implements ReportingStore
-{
-    /**
-     * The cached reports for given repositories.
-     */
-    private Map/*<String,ReportingDatabase>*/ reports = new HashMap();
-
-    public ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException
-    {
-        String key = getKey( repository, reportGroup );
-        ReportingDatabase database = (ReportingDatabase) reports.get( key );
-
-        if ( database == null )
-        {
-            ReportingXpp3Reader reader = new ReportingXpp3Reader();
-
-            File file = getReportFilename( repository, reportGroup );
-
-            FileReader fileReader = null;
-            try
-            {
-                fileReader = new FileReader( file );
-            }
-            catch ( FileNotFoundException e )
-            {
-                database = new ReportingDatabase( reportGroup, repository );
-            }
-
-            if ( database == null )
-            {
-                getLogger().info( "Reading report database from " + file );
-                try
-                {
-                    Reporting reporting = reader.read( fileReader, false );
-                    database = new ReportingDatabase( reportGroup, reporting, repository );
-                }
-                catch ( IOException e )
-                {
-                    throw new ReportingStoreException( e.getMessage(), e );
-                }
-                catch ( XmlPullParserException e )
-                {
-                    throw new ReportingStoreException( e.getMessage(), e );
-                }
-                finally
-                {
-                    IOUtils.closeQuietly( fileReader );
-                }
-            }
-
-            reports.put( key, database );
-        }
-        return database;
-    }
-
-    private static String getKey( ArtifactRepository repository, ReportGroup reportGroup )
-    {
-        return repository.getId() + "/" + reportGroup.getFilename();
-    }
-
-    private static File getReportFilename( ArtifactRepository repository, ReportGroup reportGroup )
-    {
-        return new File( repository.getBasedir(), ".reports/" + reportGroup.getFilename() );
-    }
-
-    public void storeReports( ReportingDatabase database, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        database.updateTimings();
-
-        ReportingXpp3Writer writer = new ReportingXpp3Writer();
-
-        File file = getReportFilename( repository, database.getReportGroup() );
-        getLogger().info( "Writing reports to " + file );
-        FileWriter fileWriter = null;
-        try
-        {
-            file.getParentFile().mkdirs();
-
-            fileWriter = new FileWriter( file );
-            writer.write( fileWriter, database.getReporting() );
-        }
-        catch ( IOException e )
-        {
-            throw new ReportingStoreException( e.getMessage(), e );
-        }
-        finally
-        {
-            IOUtils.closeQuietly( fileWriter );
-        }
-    }
-}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java
deleted file mode 100644 (file)
index a7ead76..0000000
+++ /dev/null
@@ -1,60 +0,0 @@
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- * A component for loading the reporting database into the model.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo this is something that could possibly be generalised into Modello.
- */
-public interface ReportingStore
-{
-    /**
-     * The Plexus role for the component.
-     */
-    String ROLE = ReportingStore.class.getName();
-
-    /**
-     * Get the reports from the store. A cached version may be used.
-     *
-     * @param repository  the repository to load the reports for
-     * @param reportGroup the report group to get the report for
-     * @return the reporting database
-     * @throws ReportingStoreException if there was a problem reading the store
-     */
-    ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException;
-
-    /**
-     * Save the reporting to the store.
-     *
-     * @param database   the reports to store
-     * @param repository the repositorry to store the reports in
-     * @throws ReportingStoreException if there was a problem writing the store
-     */
-    void storeReports( ReportingDatabase database, ArtifactRepository repository )
-        throws ReportingStoreException;
-
-}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java b/archiva-MRM-239/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java
deleted file mode 100644 (file)
index e3f0880..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Exception occurring using the reporting store.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ReportingStoreException
-    extends Exception
-{
-    public ReportingStoreException( String message )
-    {
-        super( message );
-    }
-
-    public ReportingStoreException( String message, Throwable e )
-    {
-        super( message, e );
-    }
-}
index 915faaa31b6a414e5d6a9599ea73b35e983b56d2..8b41251ca2c3582e76e5fc87cd04f1c53d9d8a1a 100644 (file)
@@ -1,3 +1,4 @@
+<?xml version="1.0" ?>
 <!--
   ~ Licensed to the Apache Software Foundation (ASF) under one
   ~ or more contributor license agreements.  See the NOTICE file
@@ -30,7 +31,7 @@
     </default>
   </defaults>
   <classes>
-    <class rootElement="true" xml.tagName="reporting">
+    <class rootElement="true" xml.tagName="reporting" stash.storable="false">
       <name>Reporting</name>
       <version>1.0.0</version>
       <fields>
             <multiplicity>*</multiplicity>
           </association>
         </field>
-        <field xml.attribute="true">
-          <name>lastModified</name>
-          <version>1.0.0</version>
-          <type>long</type>
-        </field>
-        <field xml.attribute="true">
-          <name>executionTime</name>
-          <version>1.0.0</version>
-          <type>long</type>
-        </field>
       </fields>
     </class>
-    <class>
+    <class stash.storable="true" 
+           jpox.use-identifiers-as-primary-key="true"
+           jpox.identity-type="application"
+           jpox.identity-class="ArtifactResultsKey">
       <name>ArtifactResults</name>
       <version>1.0.0</version>
       <fields>
-        <field>
-          <name>failures</name>
-          <version>1.0.0</version>
-          <association>
-            <type>Result</type>
-            <multiplicity>*</multiplicity>
-          </association>
-        </field>
-        <field>
-          <name>warnings</name>
-          <version>1.0.0</version>
-          <association>
-            <type>Result</type>
-            <multiplicity>*</multiplicity>
-          </association>
-        </field>
-        <field>
-          <name>notices</name>
-          <version>1.0.0</version>
-          <association>
-            <type>Result</type>
-            <multiplicity>*</multiplicity>
-          </association>
-        </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true" 
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>groupId</name>
           <identity>true</identity>
           <version>1.0.0</version>
             The group ID of the artifact in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>artifactId</name>
           <version>1.0.0</version>
           <identity>true</identity>
             The artifact ID of the artifact in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>version</name>
           <version>1.0.0</version>
           <identity>true</identity>
             The version of the artifact in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>type</name>
           <version>1.0.0</version>
           <type>String</type>
             The type of the artifact in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.nullValue="none"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>classifier</name>
           <version>1.0.0</version>
           <type>String</type>
             The classifier of the artifact in the result.
           </description>
         </field>
-      </fields>
-    </class>
-    <class>
-      <name>MetadataResults</name>
-      <version>1.0.0</version>
-      <fields>
         <field>
           <name>failures</name>
           <version>1.0.0</version>
-          <association>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
             <type>Result</type>
             <multiplicity>*</multiplicity>
           </association>
         <field>
           <name>warnings</name>
           <version>1.0.0</version>
-          <association>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
             <type>Result</type>
             <multiplicity>*</multiplicity>
           </association>
         <field>
           <name>notices</name>
           <version>1.0.0</version>
-          <association>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
             <type>Result</type>
             <multiplicity>*</multiplicity>
           </association>
         </field>
-        <field xml.attribute="true">
+      </fields>
+    </class>
+    <class stash.storable="true" 
+           jpox.use-identifiers-as-primary-key="true"
+           jpox.identity-type="application"
+           jpox.identity-class="MetadataResultsKey">
+      <name>MetadataResults</name>
+      <version>1.0.0</version>
+      <fields>
+        <field xml.attribute="true"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>groupId</name>
           <version>1.0.0</version>
           <type>String</type>
             The group ID of the metadata in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.nullValue="none"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>artifactId</name>
           <version>1.0.0</version>
           <type>String</type>
             The artifact ID of the metadata in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.nullValue="none"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>version</name>
           <version>1.0.0</version>
           <type>String</type>
             The version of the metadata in the result.
           </description>
         </field>
+        <field>
+          <name>failures</name>
+          <version>1.0.0</version>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
+            <type>Result</type>
+            <multiplicity>*</multiplicity>
+          </association>
+        </field>
+        <field>
+          <name>warnings</name>
+          <version>1.0.0</version>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
+            <type>Result</type>
+            <multiplicity>*</multiplicity>
+          </association>
+        </field>
+        <field>
+          <name>notices</name>
+          <version>1.0.0</version>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
+            <type>Result</type>
+            <multiplicity>*</multiplicity>
+          </association>
+        </field>
         <field xml.attribute="true">
           <name>lastModified</name>
           <version>1.0.0</version>
         </field>
       </fields>
     </class>
-    <class>
+    <class stash.storable="true">
       <name>Result</name>
       <version>1.0.0</version>
       <fields>
index 4b94710830eee06e0457f710cf49fbe70a480243..a0d6894ba54df3a101d69f6b7b69cfe9e8f5cee3 100644 (file)
@@ -25,8 +25,18 @@ import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
 import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
 import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.jpox.SchemaTool;
 
 import java.io.File;
+import java.net.URL;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
 
 /**
  *
@@ -50,6 +60,8 @@ public abstract class AbstractRepositoryReportsTestCase
     {
         super.setUp();
 
+        setupJdoFactory();
+
         File repositoryDirectory = getTestFile( "src/test/repository" );
 
         factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
@@ -60,14 +72,57 @@ public abstract class AbstractRepositoryReportsTestCase
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
     }
 
-    protected Artifact createArtifactFromRepository( File repository, String groupId, String artifactId,
-                                                     String version )
+    protected void setupJdoFactory()
+        throws Exception
+    {
+        DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
+
+        jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" ); //$NON-NLS-1$
+
+        jdoFactory.setDriverName( "org.hsqldb.jdbcDriver" ); //$NON-NLS-1$
+
+        jdoFactory.setUrl( "jdbc:hsqldb:mem:" + getName() ); //$NON-NLS-1$
+
+        jdoFactory.setUserName( "sa" ); //$NON-NLS-1$
+
+        jdoFactory.setPassword( "" ); //$NON-NLS-1$
+
+        jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_UNCOMMITTED" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+        jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_UNCOMMITTED" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+        jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+        jdoFactory.setProperty( "javax.jdo.PersistenceManagerFactoryClass", "org.jpox.PersistenceManagerFactoryImpl" );
+
+        Properties properties = jdoFactory.getProperties();
+
+        for ( Iterator it = properties.entrySet().iterator(); it.hasNext(); )
+        {
+            Map.Entry entry = (Map.Entry) it.next();
+
+            System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
+        }
+
+        SchemaTool.createSchemaTables( new URL[] { getClass()
+            .getResource( "/org/apache/maven/archiva/reporting/model/package.jdo" ) }, new URL[] {}, null, false, null ); //$NON-NLS-1$
+
+        PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
+
+        assertNotNull( pmf );
+
+        PersistenceManager pm = pmf.getPersistenceManager();
+
+        pm.close();
+    }
+
+    protected Artifact createArtifactFromRepository( File repository, String groupId, String artifactId, String version )
         throws Exception
     {
         Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, "jar" );
 
-        artifact.setRepository(
-            factory.createArtifactRepository( "repository", repository.toURL().toString(), layout, null, null ) );
+        artifact.setRepository( factory.createArtifactRepository( "repository", repository.toURL().toString(), layout,
+                                                                  null, null ) );
 
         artifact.isSnapshot();
 
@@ -90,8 +145,8 @@ public abstract class AbstractRepositoryReportsTestCase
     protected Artifact createArtifactWithClassifier( String groupId, String artifactId, String version, String type,
                                                      String classifier )
     {
-        Artifact artifact =
-            artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
+        Artifact artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+                                                                          classifier );
         artifact.setRepository( repository );
         return artifact;
     }
diff --git a/archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabaseTest.java b/archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabaseTest.java
new file mode 100644 (file)
index 0000000..49a5bf7
--- /dev/null
@@ -0,0 +1,108 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * ArtifactResultsDatabaseTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArtifactResultsDatabaseTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private Artifact artifact;
+    private String processor, problem, reason;
+
+    private ArtifactResultsDatabase database;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+        
+        artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope", "type",
+                                        "classifier", null );
+        processor = "processor";
+        problem = "problem";
+        reason = "reason";
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        release( database );
+
+        super.tearDown();
+    }
+    
+    public void testAddNoticeArtifactStringStringString()
+    {
+        database.addNotice( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, artifactResults.getNotices().size() );
+
+        database.addNotice( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, artifactResults.getNotices().size() );
+    }
+
+    public void testAddWarningArtifactStringStringString()
+    {
+        database.addWarning( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, artifactResults.getWarnings().size() );
+
+        database.addWarning( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, artifactResults.getWarnings().size() );
+    }
+
+    public void testAddFailureArtifactStringStringString()
+    {
+        database.addFailure( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, artifactResults.getFailures().size() );
+
+        database.addFailure( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, artifactResults.getFailures().size() );
+    }
+}
diff --git a/archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabaseTest.java b/archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabaseTest.java
new file mode 100644 (file)
index 0000000..acee253
--- /dev/null
@@ -0,0 +1,113 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * MetadataResultsDatabaseTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MetadataResultsDatabaseTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private MetadataResultsDatabase database;
+
+    private RepositoryMetadata metadata;
+
+    private String processor, problem, reason;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
+
+        Artifact artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope",
+                                                 "type", "classifier", null );
+        metadata = new ArtifactRepositoryMetadata( artifact );
+
+        processor = "processor";
+        problem = "problem";
+        reason = "reason";
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        release( database );
+
+        super.tearDown();
+    }
+
+    public void testAddNoticeRepositoryMetadataStringStringString()
+    {
+        database.addNotice( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, metadataResults.getNotices().size() );
+
+        database.addNotice( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, metadataResults.getNotices().size() );
+    }
+
+    public void testAddWarningRepositoryMetadataStringStringString()
+    {
+        database.addWarning( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, metadataResults.getWarnings().size() );
+
+        database.addWarning( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, metadataResults.getWarnings().size() );
+    }
+
+    public void testAddFailureRepositoryMetadataStringStringString()
+    {
+        database.addFailure( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, metadataResults.getFailures().size() );
+
+        database.addFailure( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, metadataResults.getFailures().size() );
+    }
+}
index af2d45149a1a353d368dda81ed2b930b662091ee..e03d59c4fa27a2e129a80d80384c1c1014c38717 100644 (file)
@@ -19,14 +19,7 @@ package org.apache.maven.archiva.reporting.database;
  * under the License.
  */
 
-import junit.framework.TestCase;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
 
 /**
  * Test for {@link ReportingDatabase}.
@@ -35,117 +28,28 @@ import org.apache.maven.artifact.versioning.VersionRange;
  * @version $Id$
  */
 public class ReportingDatabaseTest
-    extends TestCase
+    extends AbstractRepositoryReportsTestCase
 {
-    private Artifact artifact;
-
-    private String processor, problem, reason;
-
-    private ReportingDatabase reportingDatabase;
-
-    private RepositoryMetadata metadata;
+    private ReportingDatabase database;
 
     protected void setUp()
         throws Exception
     {
         super.setUp();
-        artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope", "type",
-                                        "classifier", null );
-        processor = "processor";
-        problem = "problem";
-        reason = "reason";
-        reportingDatabase = new ReportingDatabase( null );
-
-        metadata = new ArtifactRepositoryMetadata( artifact );
-    }
-
-    public void testAddNoticeArtifactStringStringString()
-    {
-        reportingDatabase.addNotice( artifact, processor, problem, reason );
-        ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumNotices() );
-        assertEquals( 1, artifactResults.getNotices().size() );
-
-        reportingDatabase.addNotice( artifact, processor, problem, reason );
-        artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumNotices() );
-        assertEquals( 1, artifactResults.getNotices().size() );
+        database = (ReportingDatabase) lookup( ReportingDatabase.ROLE );
     }
 
-    public void testAddWarningArtifactStringStringString()
-    {
-        reportingDatabase.addWarning( artifact, processor, problem, reason );
-        ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumWarnings() );
-        assertEquals( 1, artifactResults.getWarnings().size() );
-
-        reportingDatabase.addWarning( artifact, processor, problem, reason );
-        artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumWarnings() );
-        assertEquals( 1, artifactResults.getWarnings().size() );
-    }
-
-    public void testAddFailureArtifactStringStringString()
-    {
-        reportingDatabase.addFailure( artifact, processor, problem, reason );
-        ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 1, artifactResults.getFailures().size() );
-
-        reportingDatabase.addFailure( artifact, processor, problem, reason );
-        artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 1, artifactResults.getFailures().size() );
-    }
-
-    public void testAddNoticeRepositoryMetadataStringStringString()
-    {
-        reportingDatabase.addNotice( metadata, processor, problem, reason );
-        MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumNotices() );
-        assertEquals( 1, metadataResults.getNotices().size() );
-
-        reportingDatabase.addNotice( metadata, processor, problem, reason );
-        metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumNotices() );
-        assertEquals( 1, metadataResults.getNotices().size() );
-    }
-
-    public void testAddWarningRepositoryMetadataStringStringString()
+    protected void tearDown()
+        throws Exception
     {
-        reportingDatabase.addWarning( metadata, processor, problem, reason );
-        MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumWarnings() );
-        assertEquals( 1, metadataResults.getWarnings().size() );
-
-        reportingDatabase.addWarning( metadata, processor, problem, reason );
-        metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumWarnings() );
-        assertEquals( 1, metadataResults.getWarnings().size() );
+        release( database );
+        super.tearDown();
     }
 
-    public void testAddFailureRepositoryMetadataStringStringString()
+    public void testLookup()
     {
-        reportingDatabase.addFailure( metadata, processor, problem, reason );
-        MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 1, metadataResults.getFailures().size() );
-
-        reportingDatabase.addFailure( metadata, processor, problem, reason );
-        metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 1, metadataResults.getFailures().size() );
+        assertNotNull( "database should not be null.", database );
+        assertNotNull( "database.artifactDatabase should not be null.", database.getArtifactDatabase() );
+        assertNotNull( "database.metadataDatabase should not be null.", database.getMetadataDatabase() );
     }
 }
index ad33b0d2f6ea12daab4f083ee6a596b8888831d9..feebb72d01faa87b06b43d41277aeb2ff1741469 100644 (file)
@@ -19,9 +19,9 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
 import org.apache.maven.archiva.reporting.model.MetadataResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -36,6 +36,11 @@ import org.apache.maven.artifact.repository.metadata.Versioning;
 
 import java.util.Iterator;
 
+/**
+ * BadMetadataReportProcessorTest 
+ *
+ * @version $Id$
+ */
 public class BadMetadataReportProcessorTest
     extends AbstractRepositoryReportsTestCase
 {
@@ -43,7 +48,7 @@ public class BadMetadataReportProcessorTest
 
     private MetadataReportProcessor badMetadataReportProcessor;
 
-    private ReportingDatabase reportingDatabase;
+    private MetadataResultsDatabase database;
 
     protected void setUp()
         throws Exception
@@ -52,12 +57,19 @@ public class BadMetadataReportProcessorTest
 
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
 
-        badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
+        database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
 
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
+        badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
     }
 
+    protected void tearDown()
+        throws Exception
+    {
+        release( artifactFactory );
+        release( badMetadataReportProcessor );
+        super.tearDown();
+    }
+    
     public void testMetadataMissingLastUpdated()
     {
         Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
@@ -68,9 +80,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -87,9 +99,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, null );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -106,41 +118,41 @@ public class BadMetadataReportProcessorTest
         if ( alpha1First )
         {
             assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
-                          result.getReason() );
+                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.", result
+                              .getReason() );
         }
         else
         {
             assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                          result.getReason() );
+                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+                              .getReason() );
         }
         result = (Result) failures.next();
         if ( !alpha1First )
         {
             assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
-                          result.getReason() );
+                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.", result
+                              .getReason() );
         }
         else
         {
             assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                          result.getReason() );
+                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+                              .getReason() );
         }
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
     public void testSnapshotMetadataMissingVersioning()
     {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+                                                                 "1.0-alpha-1-SNAPSHOT", "type" );
 
         RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -162,9 +174,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertFalse( "check there are no failures", failures.hasNext() );
     }
 
@@ -178,9 +190,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -189,8 +201,8 @@ public class BadMetadataReportProcessorTest
         assertMetadata( metadata, results );
         // TODO: should be more robust
         assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                      result.getReason() );
+                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+                          .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -206,9 +218,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -217,8 +229,8 @@ public class BadMetadataReportProcessorTest
         assertMetadata( metadata, results );
         // TODO: should be more robust
         assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
-                      result.getReason() );
+                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.", result
+                          .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -233,9 +245,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -244,14 +256,14 @@ public class BadMetadataReportProcessorTest
         assertMetadata( metadata, results );
         // TODO: should be more robust
         assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
-                      result.getReason() );
+                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.", result
+                          .getReason() );
         assertTrue( "check there is a 2nd failure", failures.hasNext() );
         result = (Result) failures.next();
         // TODO: should be more robust
         assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                      result.getReason() );
+                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+                          .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -261,9 +273,9 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertFalse( "check there are no failures", failures.hasNext() );
     }
 
@@ -274,17 +286,16 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "missing-plugin", "default3" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         Result result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository",
-                      result.getReason() );
+        assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository", result.getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -293,18 +304,17 @@ public class BadMetadataReportProcessorTest
         RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         Result result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason",
-                      "Plugin snapshot-artifact is present in the repository but " + "missing in the metadata.",
-                      result.getReason() );
+        assertEquals( "check reason", "Plugin snapshot-artifact is present in the repository but "
+            + "missing in the metadata.", result.getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -316,22 +326,22 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( null, "default3" ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "", "default4" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         Result result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3",
-                      result.getReason() );
+        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3", result
+            .getReason() );
         assertTrue( "check there is a 2nd failure", failures.hasNext() );
         result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4",
-                      result.getReason() );
+        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4", result
+            .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -341,9 +351,9 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -354,8 +364,8 @@ public class BadMetadataReportProcessorTest
         assertTrue( "check there is a 2nd failure", failures.hasNext() );
         result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.",
-                      result.getReason() );
+        assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.", result
+            .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -365,9 +375,9 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -380,8 +390,8 @@ public class BadMetadataReportProcessorTest
 
     public void testValidSnapshotMetadata()
     {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+                                                                 "1.0-alpha-1-SNAPSHOT", "type" );
 
         Snapshot snapshot = new Snapshot();
         snapshot.setBuildNumber( 1 );
@@ -389,16 +399,16 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertFalse( "check there are no failures", failures.hasNext() );
     }
 
     public void testInvalidSnapshotMetadata()
     {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+                                                                 "1.0-alpha-1-SNAPSHOT", "type" );
 
         Snapshot snapshot = new Snapshot();
         snapshot.setBuildNumber( 2 );
@@ -406,9 +416,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -416,16 +426,21 @@ public class BadMetadataReportProcessorTest
         Result result = (Result) failures.next();
         assertMetadata( metadata, results );
         // TODO: should be more robust
-        assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.",
-                      result.getReason() );
+        assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.", result
+            .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
     private static void assertMetadata( RepositoryMetadata metadata, MetadataResults results )
     {
-        assertEquals( "check metadata", metadata.getGroupId(), results.getGroupId() );
-        assertEquals( "check metadata", metadata.getArtifactId(), results.getArtifactId() );
-        assertEquals( "check metadata", metadata.getBaseVersion(), results.getVersion() );
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version.
+         * This also means that none of those fields may be null.  however, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+        assertEquals( "check metadata", StringUtils.defaultString( metadata.getGroupId() ), results.getGroupId() );
+        assertEquals( "check metadata", StringUtils.defaultString( metadata.getArtifactId() ), results.getArtifactId() );
+        assertEquals( "check metadata", StringUtils.defaultString( metadata.getBaseVersion() ), results.getVersion() );
     }
 
     private Plugin createMetadataPlugin( String artifactId, String prefix )
index bf1ee803ccae35fcfe617f4377b1a325a216dd69..afbb2037a9c59308985eec68db5130f9dd552bf9 100644 (file)
@@ -20,8 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -43,7 +42,7 @@ public class DependencyArtifactReportProcessorTest
 
     private static final String VALID_VERSION = "1.0-alpha-1";
 
-    private ReportingDatabase reportingDatabase;
+    private ArtifactResultsDatabase database;
 
     private Model model;
 
@@ -58,27 +57,24 @@ public class DependencyArtifactReportProcessorTest
     {
         super.setUp();
         model = new Model();
-        processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
-
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+        processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
     }
 
     public void testArtifactFoundButNoDirectDependencies()
     {
         Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     private Artifact createValidArtifact()
     {
-        Artifact projectArtifact =
-            artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
+        Artifact projectArtifact = artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID,
+                                                                          VALID_VERSION );
         projectArtifact.setRepository( repository );
         return projectArtifact;
     }
@@ -87,11 +83,11 @@ public class DependencyArtifactReportProcessorTest
     {
         Artifact artifact = artifactFactory.createProjectArtifact( INVALID, INVALID, INVALID );
         artifact.setRepository( repository );
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -106,10 +102,10 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createValidDependency();
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     private Dependency createValidDependency()
@@ -124,10 +120,10 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createValidDependency();
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     public void testValidArtifactWithValidMultipleDependencies()
@@ -140,10 +136,10 @@ public class DependencyArtifactReportProcessorTest
         model.addDependency( dependency );
 
         Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     public void testValidArtifactWithAnInvalidDependency()
@@ -156,18 +152,17 @@ public class DependencyArtifactReportProcessorTest
         model.addDependency( createDependency( INVALID, INVALID, INVALID ) );
 
         Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
         Result result = (Result) failures.next();
-        assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ),
-                      result.getReason() );
+        assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ), result.getReason() );
     }
 
     public void testValidArtifactWithInvalidDependencyGroupId()
@@ -177,12 +172,12 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( INVALID, VALID_ARTIFACT_ID, VALID_VERSION );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -206,12 +201,12 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, INVALID, VALID_VERSION );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -226,11 +221,11 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, INVALID );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -245,12 +240,12 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[" );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -265,10 +260,10 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[1.0,)" );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     public void testValidArtifactWithMissingDependencyVersion()
@@ -278,12 +273,12 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, null );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
index dbbb3bd8ce5236f27b11412fc996aff0231e8286..6ab299a5c4c8a28a1e654b5810cd527471d517d6 100644 (file)
@@ -24,8 +24,7 @@ import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
 import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
 import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.model.Model;
@@ -49,7 +48,7 @@ public class DuplicateArtifactFileReportProcessorTest
 
     File indexDirectory;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     protected void setUp()
         throws Exception
@@ -60,8 +59,11 @@ public class DuplicateArtifactFileReportProcessorTest
         FileUtils.deleteDirectory( indexDirectory );
 
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+        
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+        
         artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "jar" );
-        System.out.println( "artifact = " + artifact );
+        
         model = new Model();
 
         RepositoryArtifactIndexFactory factory =
@@ -75,9 +77,6 @@ public class DuplicateArtifactFileReportProcessorTest
         index.indexRecords( Collections.singletonList( recordFactory.createRecord( artifact ) ) );
 
         processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "duplicate" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
     }
 
     public void testNullArtifactFile()
@@ -85,32 +84,32 @@ public class DuplicateArtifactFileReportProcessorTest
     {
         artifact.setFile( null );
 
-        processor.processArtifact( artifact, model, reportDatabase );
+        processor.processArtifact( artifact, model );
 
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 1, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check warnings", 1, database.getNumWarnings() );
+        assertEquals( "Check no failures", 0, database.getNumFailures() );
     }
 
     public void testSuccessOnAlreadyIndexedArtifact()
         throws Exception
     {
-        processor.processArtifact( artifact, model, reportDatabase );
+        processor.processArtifact( artifact, model );
 
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check warnings", 0, database.getNumWarnings() );
+        assertEquals( "Check no failures", 0, database.getNumFailures() );
     }
 
     public void testSuccessOnDifferentGroupId()
         throws Exception
     {
         artifact.setGroupId( "different.groupId" );
-        processor.processArtifact( artifact, model, reportDatabase );
+        processor.processArtifact( artifact, model );
 
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check warnings", 0, database.getNumWarnings() );
+        assertEquals( "Check no failures", 0, database.getNumFailures() );
     }
 
     public void testSuccessOnNewArtifact()
@@ -118,11 +117,11 @@ public class DuplicateArtifactFileReportProcessorTest
     {
         Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
 
-        processor.processArtifact( newArtifact, model, reportDatabase );
+        processor.processArtifact( newArtifact, model );
 
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check warnings", 0, database.getNumWarnings() );
+        assertEquals( "Check no failures", 0, database.getNumFailures() );
     }
 
     public void testFailure()
@@ -132,11 +131,11 @@ public class DuplicateArtifactFileReportProcessorTest
                                              artifact.getVersion(), artifact.getType() );
         duplicate.setFile( artifact.getFile() );
 
-        processor.processArtifact( duplicate, model, reportDatabase );
+        processor.processArtifact( duplicate, model );
 
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check no failures", 1, reportDatabase.getNumFailures() );
+        assertEquals( "Check warnings", 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check no failures", 1, database.getNumFailures() );
     }
 
     private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
index 049ae48d54576fb692da74e4cff9ae8e1718824c..f37bd20ac03064f9f789e56fca6f51ab9946188b 100644 (file)
@@ -20,8 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 
 /**
@@ -32,16 +31,14 @@ public class InvalidPomArtifactReportProcessorTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
         artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "invalid-pom" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
     }
 
     /**
@@ -51,8 +48,8 @@ public class InvalidPomArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "org.apache.maven", "artifactId", "1.0-alpha-3", "pom" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, database.getNumFailures() );
     }
 
 
@@ -63,10 +60,10 @@ public class InvalidPomArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
 
@@ -77,9 +74,9 @@ public class InvalidPomArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "jar" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 }
index 9eb59b3a7ca5b31c675b8bd6b16287727a1b4015..134295cb9abbebbd78083222a54f27137f3a3b71 100644 (file)
@@ -20,8 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.model.Model;
 import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
@@ -40,16 +39,14 @@ public class LocationArtifactReportProcessorTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
         artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "artifact-location" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
     }
 
     /**
@@ -61,10 +58,10 @@ public class LocationArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -78,10 +75,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -94,10 +91,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( pomArtifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( pomArtifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -111,10 +108,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -129,10 +126,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -147,10 +144,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -164,9 +161,9 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+        artifactReportProcessor.processArtifact( artifact, model );
 
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        assertEquals( 1, database.getNumFailures() );
     }
 
     /**
@@ -180,8 +177,8 @@ public class LocationArtifactReportProcessorTest
 
         Artifact pomArtifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0", "pom" );
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
     }
 
     private Model readPom( String path )
@@ -210,8 +207,8 @@ public class LocationArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "org.apache.maven", "maven-monitor", "2.1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, database.getNumFailures() );
     }
 
     /**
@@ -223,8 +220,8 @@ public class LocationArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "org.apache.maven", "maven-project", "2.1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, database.getNumFailures() );
     }
 
 }
index 117a25fd1d22639ddf783d5a76c257d7c9b87935..325ba6a34970e794251914c4a7ce53b0787100ee 100644 (file)
@@ -20,8 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -38,27 +37,25 @@ public class OldArtifactReportProcessorTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
         artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-artifact" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
-        reportDatabase = new ReportingDatabase( reportGroup );
     }
 
     public void testOldArtifact()
     {
         Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
-        ArtifactResults results = (ArtifactResults) reportDatabase.getArtifactIterator().next();
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check notices", 1, database.getNumNotices() );
+        ArtifactResults results = (ArtifactResults) database.getIterator().next();
         assertEquals( artifact.getArtifactId(), results.getArtifactId() );
         assertEquals( artifact.getGroupId(), results.getGroupId() );
         assertEquals( artifact.getVersion(), results.getVersion() );
@@ -78,10 +75,10 @@ public class OldArtifactReportProcessorTest
 
         Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     public void testMissingArtifact()
@@ -91,7 +88,7 @@ public class OldArtifactReportProcessorTest
 
         try
         {
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+            artifactReportProcessor.processArtifact( artifact, null );
             fail( "Should not have passed" );
         }
         catch ( IllegalStateException e )
index b09732ff5b8d563a86ab0c60907f6cf85c60ad0b..680373ea33db85272e73a69833121b09510c9e7f 100644 (file)
@@ -21,8 +21,7 @@ package org.apache.maven.archiva.reporting.processor;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -40,7 +39,7 @@ public class OldSnapshotArtifactReportProcessorTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     private File tempRepository;
 
@@ -48,11 +47,10 @@ public class OldSnapshotArtifactReportProcessorTest
         throws Exception
     {
         super.setUp();
-        artifactReportProcessor =
-            (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-snapshot-artifact" );
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE,
+                                                                    "old-snapshot-artifact" );
 
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
-        reportDatabase = new ReportingDatabase( reportGroup );
         tempRepository = getTestFile( "target/test-repository" );
         FileUtils.deleteDirectory( tempRepository );
     }
@@ -61,11 +59,11 @@ public class OldSnapshotArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-20050611.202024-1", "pom" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
-        Iterator artifactIterator = reportDatabase.getArtifactIterator();
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check notices", 1, database.getNumNotices() );
+        Iterator artifactIterator = database.getIterator();
         assertArtifactResults( artifactIterator, artifact );
     }
 
@@ -86,20 +84,20 @@ public class OldSnapshotArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "pom" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     public void testNonSnapshotArtifact()
     {
         Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     public void testNewSnapshotArtifact()
@@ -113,13 +111,13 @@ public class OldSnapshotArtifactReportProcessorTest
         String date = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).format( new Date() );
         FileUtils.writeStringToFile( new File( dir, "artifactId-1.0-alpha-1-" + date + "-1.jar" ), "foo", null );
 
-        Artifact artifact =
-            createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date + "-1" );
+        Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date
+            + "-1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     public void testTooManySnapshotArtifact()
@@ -137,15 +135,15 @@ public class OldSnapshotArtifactReportProcessorTest
 
         for ( int i = 1; i <= 5; i++ )
         {
-            Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId",
-                                                              "1.0-alpha-1-" + date + "-" + i );
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+            Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId", "1.0-alpha-1-"
+                + date + "-" + i );
+            artifactReportProcessor.processArtifact( artifact, null );
         }
 
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 3, reportDatabase.getNumNotices() );
-        Iterator artifactIterator = reportDatabase.getArtifactIterator();
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check notices", 3, database.getNumNotices() );
+        Iterator artifactIterator = database.getIterator();
         for ( int i = 1; i <= 3; i++ )
         {
             String version = "1.0-alpha-1-" + date + "-" + i;
@@ -161,7 +159,7 @@ public class OldSnapshotArtifactReportProcessorTest
 
         try
         {
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+            artifactReportProcessor.processArtifact( artifact, null );
             fail( "Should not have passed" );
         }
         catch ( IllegalStateException e )
index 810ff1fc369e8a7cff46e5ec462085e9356f0fd3..3fc9acd0b73dfd6aca89c3e6ae35ff3d8acbb328 100644 (file)
@@ -19,21 +19,12 @@ package org.apache.maven.archiva.reporting.reporter;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
-import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
 import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
 import org.codehaus.plexus.digest.DigesterException;
 
-import java.io.File;
 import java.io.IOException;
-import java.util.Iterator;
 
 /**
  * This class tests the ChecksumArtifactReportProcessor.
@@ -44,19 +35,14 @@ public class ChecksumArtifactReporterTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportingDatabase;
-
-    private MetadataReportProcessor metadataReportProcessor;
+    private ArtifactResultsDatabase database;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
         artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
-        metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
     }
 
     /**
@@ -70,10 +56,10 @@ public class ChecksumArtifactReporterTest
 
         Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -85,79 +71,9 @@ public class ChecksumArtifactReporterTest
         String s1 = "1.0";
         Artifact artifact = createArtifact( "checksumTest", s, s1 );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the valid checksum of a metadata file.
-     * The reportingDatabase should report 2 success validation.
-     */
-    public void testChecksumMetadataReporterSuccess()
-        throws DigesterException, IOException
-    {
-        createMetadataFile( "VALID" );
-        createMetadataFile( "INVALID" );
-
-        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
-        //Version level metadata
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        //Artifact level metadata
-        metadata = new ArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        //Group level metadata
-        metadata = new GroupRepositoryMetadata( "checksumTest" );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-    }
-
-    /**
-     * Test the corrupted checksum of a metadata file.
-     * The reportingDatabase must report 2 failures.
-     */
-    public void testChecksumMetadataReporterFailure()
-    {
-        Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-    }
-
-    /**
-     * Test the conditional when the checksum files of the artifact & metadata do not exist.
-     */
-    public void testChecksumFilesDoNotExist()
-        throws DigesterException, IOException
-    {
-        createChecksumFile( "VALID" );
-        createMetadataFile( "VALID" );
-        deleteChecksumFiles( "jar" );
-
-        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-
-        deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
     }
 }
diff --git a/archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumMetadataReporterTest.java b/archiva-MRM-239/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumMetadataReporterTest.java
new file mode 100644 (file)
index 0000000..6cd3583
--- /dev/null
@@ -0,0 +1,135 @@
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.codehaus.plexus.digest.DigesterException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * ChecksumMetadataReporterTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ChecksumMetadataReporterTest
+    extends AbstractChecksumArtifactReporterTestCase
+{
+    private ArtifactReportProcessor artifactReportProcessor;
+
+    private MetadataReportProcessor metadataReportProcessor;
+
+    private MetadataResultsDatabase database;
+
+    private ArtifactResultsDatabase artifactsDatabase;
+
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+        metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
+        database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
+        artifactsDatabase = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+    }
+
+    /**
+     * Test the valid checksum of a metadata file.
+     * The reportingDatabase should report 2 success validation.
+     */
+    public void testChecksumMetadataReporterSuccess()
+        throws DigesterException, IOException
+    {
+        createMetadataFile( "VALID" );
+        createMetadataFile( "INVALID" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        //Version level metadata
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        //Artifact level metadata
+        metadata = new ArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        //Group level metadata
+        metadata = new GroupRepositoryMetadata( "checksumTest" );
+        metadataReportProcessor.processMetadata( metadata, repository );
+    }
+
+    /**
+     * Test the corrupted checksum of a metadata file.
+     * The reportingDatabase must report 2 failures.
+     */
+    public void testChecksumMetadataReporterFailure()
+    {
+        Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        Iterator failures = database.getIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+    }
+
+    /**
+     * Test the conditional when the checksum files of the artifact & metadata do not exist.
+     */
+    public void testChecksumFilesDoNotExist()
+        throws DigesterException, IOException
+    {
+        createChecksumFile( "VALID" );
+        createMetadataFile( "VALID" );
+        deleteChecksumFiles( "jar" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, artifactsDatabase.getNumFailures() );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        Iterator failures = database.getIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+
+        deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+    }
+
+}
index 2be9571e10af4aed68f0ed46d4fb11c2291fbdfd..f2034e9d5c11df1f5b233a5d8a2170800978c390 100644 (file)
@@ -21,7 +21,6 @@ package org.apache.maven.archiva.reporting.reporter;
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
 import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.MetadataResults;
 import org.apache.maven.archiva.reporting.model.Result;
@@ -34,12 +33,14 @@ import org.apache.maven.artifact.repository.metadata.Versioning;
 import java.util.Iterator;
 
 /**
+ * DefaultArtifactReporterTest 
  *
+ * @version $Id$
  */
 public class DefaultArtifactReporterTest
     extends AbstractRepositoryReportsTestCase
 {
-    private ReportingDatabase reportingDatabase;
+    private ReportingDatabase database;
 
     private RepositoryMetadata metadata;
 
@@ -49,23 +50,41 @@ public class DefaultArtifactReporterTest
 
     private Artifact artifact;
 
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        database = (ReportingDatabase) lookup( ReportingDatabase.ROLE );
+
+        ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+        artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        Versioning versioning = new Versioning();
+        versioning.addVersion( "1.0-alpha-1" );
+        versioning.addVersion( "1.0-alpha-2" );
+
+        metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+    }
+
     public void testEmptyArtifactReporter()
     {
-        assertEquals( "No failures", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "No warnings", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-        assertFalse( "No artifact failures", reportingDatabase.getArtifactIterator().hasNext() );
-        assertFalse( "No metadata failures", reportingDatabase.getMetadataIterator().hasNext() );
+        assertEquals( "No failures", 0, database.getNumFailures() );
+        assertEquals( "No warnings", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
+        assertFalse( "No artifact failures", database.getArtifactIterator().hasNext() );
+        assertFalse( "No metadata failures", database.getMetadataIterator().hasNext() );
     }
 
     public void testMetadataSingleFailure()
     {
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
-        assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
+        assertEquals( "failures count", 1, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getMetadataIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -87,13 +106,13 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataMultipleFailures()
     {
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
-        assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
+        database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
+        assertEquals( "failures count", 2, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getMetadataIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -113,12 +132,12 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataSingleWarning()
     {
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 1, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getMetadataIterator();
+        Iterator warnings = database.getMetadataIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         MetadataResults results = (MetadataResults) warnings.next();
         warnings = results.getWarnings().iterator();
@@ -133,13 +152,13 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataMultipleWarnings()
     {
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
+        database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 2, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getMetadataIterator();
+        Iterator warnings = database.getMetadataIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         MetadataResults results = (MetadataResults) warnings.next();
         warnings = results.getWarnings().iterator();
@@ -159,12 +178,12 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataSingleNotice()
     {
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
-        assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
+        assertEquals( "failure count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check notices", 1, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getMetadataIterator();
+        Iterator warnings = database.getMetadataIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         MetadataResults results = (MetadataResults) warnings.next();
         warnings = results.getNotices().iterator();
@@ -179,13 +198,13 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataMultipleNotices()
     {
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
+        database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 2, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getMetadataIterator();
+        Iterator warnings = database.getMetadataIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         MetadataResults results = (MetadataResults) warnings.next();
         warnings = results.getNotices().iterator();
@@ -205,12 +224,12 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactSingleFailure()
     {
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
-        assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
+        assertEquals( "failures count", 1, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getArtifactIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         ArtifactResults results = (ArtifactResults) failures.next();
         failures = results.getFailures().iterator();
@@ -234,13 +253,13 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactMultipleFailures()
     {
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
-        assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
+        database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
+        assertEquals( "failures count", 2, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getArtifactIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         ArtifactResults results = (ArtifactResults) failures.next();
         failures = results.getFailures().iterator();
@@ -260,12 +279,12 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactSingleWarning()
     {
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 1, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getArtifactIterator();
+        Iterator warnings = database.getArtifactIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         ArtifactResults results = (ArtifactResults) warnings.next();
         warnings = results.getWarnings().iterator();
@@ -280,13 +299,13 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactMultipleWarnings()
     {
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
+        database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 2, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getArtifactIterator();
+        Iterator warnings = database.getArtifactIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         ArtifactResults results = (ArtifactResults) warnings.next();
         warnings = results.getWarnings().iterator();
@@ -306,12 +325,12 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactSingleNotice()
     {
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
-        assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
+        assertEquals( "failure count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check notices", 1, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getArtifactIterator();
+        Iterator warnings = database.getArtifactIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         ArtifactResults results = (ArtifactResults) warnings.next();
         warnings = results.getNotices().iterator();
@@ -326,13 +345,13 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactMultipleNotices()
     {
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
+        database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 2, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getArtifactIterator();
+        Iterator warnings = database.getArtifactIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         ArtifactResults results = (ArtifactResults) warnings.next();
         warnings = results.getNotices().iterator();
@@ -350,22 +369,4 @@ public class DefaultArtifactReporterTest
         assertFalse( "no more warnings", warnings.hasNext() );
     }
 
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        Versioning versioning = new Versioning();
-        versioning.addVersion( "1.0-alpha-1" );
-        versioning.addVersion( "1.0-alpha-2" );
-
-        metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
-    }
 }
diff --git a/archiva-MRM-239/archiva-reports-standard/src/test/resources/META-INF/plexus/components.xml b/archiva-MRM-239/archiva-reports-standard/src/test/resources/META-INF/plexus/components.xml
new file mode 100644 (file)
index 0000000..6e27005
--- /dev/null
@@ -0,0 +1,9 @@
+<component-set>
+  <components>
+    <component>
+      <role>org.codehaus.plexus.jdo.JdoFactory</role>
+      <role-hint>archiva</role-hint>
+      <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+    </component>
+  </components>
+</component-set>
diff --git a/archiva-MRM-239/archiva-reports-standard/src/test/resources/log4j.properties b/archiva-MRM-239/archiva-reports-standard/src/test/resources/log4j.properties
new file mode 100644 (file)
index 0000000..9b2c26a
--- /dev/null
@@ -0,0 +1,10 @@
+# Set root logger level to DEBUG and its only appender to A1.
+log4j.rootLogger=WARN, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+
index 0d9da15daf6cf3dbfb6b3a46f412055c62cd5bd0..5ae0123925e47ad27efc7375d09084f8e6361d99 100644 (file)
         <requirement>
           <role>org.codehaus.plexus.digest.Digester</role>
           <role-hint>md5</role-hint>
+          <field-name>digester</field-name>
         </requirement>
         <requirement>
           <role>org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory</role>
+          <field-name>indexFactory</field-name>
+        </requirement>
+        <requirement>
+          <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+          <field-name>database</field-name>
         </requirement>
       </requirements>
       <configuration>
index 64be4813ea86fb431def5ae86327dec3b192096e..016361e9be517e3068b80f3a9ab51bb93d60a99a 100644 (file)
       <role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
       <role-hint>old-artifact</role-hint>
       <implementation>org.apache.maven.archiva.reporting.processor.OldArtifactReportProcessor</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+          <field-name>database</field-name>
+        </requirement>
+      </requirements>
       <configuration>
         <maxAge>10</maxAge>
       </configuration>
index 64acabcf2a572251b78f20f235f8756da9b25007..779aa8c243c9dba9bc78f50a757948bbf15f8880 100644 (file)
       <role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
       <role-hint>old-snapshot-artifact</role-hint>
       <implementation>org.apache.maven.archiva.reporting.processor.OldSnapshotArtifactReportProcessor</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+          <field-name>database</field-name>
+        </requirement>
+      </requirements>
       <configuration>
         <maxAge>3600</maxAge>
         <maxSnapshots>2</maxSnapshots>
index e5d22119f1ae3296a0173fc9ab1112707536585d..e400dfe16097af89372a0364a43f38ee0d492c64 100644 (file)
     <dependency>
       <groupId>org.apache.derby</groupId>
       <artifactId>derby</artifactId>
+      <version>10.1.3.1</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
index 93568a2ae4f52c73257121c963a4f3fc71fc45c6..6ae6b8a499151eba19b1f03255c59822454cad2e 100644 (file)
@@ -35,7 +35,7 @@ import java.util.Date;
 /**
  * AuditLog - Audit Log. 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  * 
  * @plexus.component role="org.apache.maven.archiva.web.repository.AuditLog"
index 77e45ebfd5c9b71d754ab4c441fe1aa45edf9d1c..db43e54b77dc33827adcf74319295acb87e67ea9 100644 (file)
@@ -49,7 +49,7 @@ import java.util.List;
 /**
  * ProxiedDavServer
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  * @plexus.component role="org.codehaus.plexus.webdav.DavServerComponent"
  * role-hint="proxied"
index d658dce275b7639fb3c73578dedfa9ea8014fbcb..ca4b1c54a8057a8a5ec2fa2094d0ce371814dd35 100644 (file)
@@ -52,7 +52,7 @@ import java.util.List;
 /**
  * RepositoryServlet
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class RepositoryServlet
index 226f27c387c56ad855330b39d0eb95196bbea38d..792aadd58c53fd72091bd3e7a6c5954573aa300d 100644 (file)
@@ -44,7 +44,7 @@ import javax.servlet.jsp.PageContext;
 /**
  * DownloadArtifact 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  * 
  * @plexus.component role="com.opensymphony.webwork.components.Component" role-hint="download-artifact" 
index 7127acad91b86a9684f53996e1214e41b4d0044a..21aaa982f753e9be07da204a14257db029e63c2f 100644 (file)
@@ -27,7 +27,7 @@ import javax.servlet.jsp.tagext.TagSupport;
 /**
  * DownloadArtifactTag 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class DownloadArtifactTag
index 8efab8a18a48f881d49e4520520d32c78b96064e..b59c5636fcea77aabf5fdf648a5ab833256c543f 100644 (file)
@@ -29,7 +29,7 @@ import javax.servlet.jsp.tagext.Tag;
 /**
  * ExpressionTool 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class ExpressionTool
index 02347c150d2a3864419bf0ff321516ca7c61502d..65860f280d762858f2cfff876ebd5e011eb50678 100644 (file)
@@ -33,7 +33,7 @@ import javax.servlet.http.HttpServletResponse;
 /**
  * GroupIdLink 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class GroupIdLink
index 462d6ae6f2d43a848d7aca4a223a01ea44d055ec..ec42b8bbec0cba3215b9ab9db74d8166dc96fc90 100644 (file)
@@ -29,7 +29,7 @@ import javax.servlet.jsp.tagext.TagSupport;
 /**
  * GroupIdLink 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class GroupIdLinkTag
index 2b730c6bbd98bdf4f52cb90b68c558d8a6cefcff..a56068a3021d9d6b4dfc82d7693a2aa225a94ecb 100644 (file)
@@ -29,7 +29,7 @@ import javax.servlet.jsp.PageContext;
 /**
  * PlexusTagUtil 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class PlexusTagUtil
index 811489d43ad81beea5d63e5db833eef8df7c496c..7588e3470036ad915cf040a3dd7669713deafa95 100644 (file)
     </component>
     <component>
       <role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
-      <role-hint>indexer</role-hint>
+      <role-hint>data-refresh</role-hint>
     </component>
   </load-on-start>
 </plexus>
index 2bd13d6025a9a88845492375fa53749b72db2413..4b7ba1e114c911ae619cc2eee560e0d05c09ef86 100644 (file)
       </snapshots>
     </repository>
   </repositories>
+  <pluginRepositories>
+    <pluginRepository>
+      <id>codehaus.org</id>
+      <url>http://snapshots.repository.codehaus.org</url>
+      <releases>
+        <enabled>false</enabled>
+      </releases>
+      <snapshots>
+        <enabled>true</enabled>
+      </snapshots>
+    </pluginRepository>
+  </pluginRepositories>
   <properties>
     <maven.version>2.0.4</maven.version>
     <wagon.version>1.0-beta-2</wagon.version>