Merge from archiva-MRM-239 branch to trunk. r506385:HEAD
author    Joakim Erdfelt <joakime@apache.org>
          Fri, 23 Feb 2007 19:05:21 +0000 (19:05 +0000)
committer Joakim Erdfelt <joakime@apache.org>
          Fri, 23 Feb 2007 19:05:21 +0000 (19:05 +0000)
git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@511053 13f79535-47bb-0310-9956-ffa450edef68

260 files changed:
archiva-cli/pom.xml
archiva-cli/src/main/java/org/apache/maven/archiva/cli/ArchivaCli.java
archiva-common/pom.xml [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/ArchivaException.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilder.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/BuilderException.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilder.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LayoutArtifactBuilder.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilder.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifact.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifactTypes.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedEjbArtifact.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedJavaArtifact.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/AbstractConsumer.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/Consumer.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerException.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerFactory.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumer.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericModelConsumer.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumer.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/utils/BaseFile.java [new file with mode: 0644]
archiva-common/src/main/java/org/apache/maven/archiva/common/utils/PathUtil.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/AbstractArchivaCommonTestCase.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/AllTests.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilderTestCase.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AllTests.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilderTest.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilderTest.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AbstractGenericConsumerTestCase.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AllTests.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/FileProblemsTracker.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockArtifactConsumer.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockModelConsumer.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockRepositoryMetadataConsumer.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/utils/AllTests.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/utils/BaseFileTest.java [new file with mode: 0644]
archiva-common/src/test/java/org/apache/maven/archiva/common/utils/PathUtilTest.java [new file with mode: 0644]
archiva-common/src/test/legacy-repository/CVS/Root [new file with mode: 0644]
archiva-common/src/test/legacy-repository/KEYS [new file with mode: 0644]
archiva-common/src/test/legacy-repository/invalid/foo/invalid-1.0.foo [new file with mode: 0644]
archiva-common/src/test/legacy-repository/invalid/invalid-1.0.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/invalid/jars/1.0/invalid-1.0.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/invalid/jars/invalid-1.0.rar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/invalid/jars/invalid.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/invalid/jars/no-extension [new file with mode: 0644]
archiva-common/src/test/legacy-repository/javax.sql/jars/jdbc-2.0.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-not-updated-1.0.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-updated-1.0.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven/jars/some-ejb-1.0-client.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-20050611.112233-1.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-sources.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.jar [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.tar.gz [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.zip [new file with mode: 0644]
archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-UNKNOWN.jar [new file with mode: 0644]
archiva-common/src/test/repository/CVS/Root [new file with mode: 0644]
archiva-common/src/test/repository/KEYS [new file with mode: 0644]
archiva-common/src/test/repository/invalid/invalid-1.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar [new file with mode: 0644]
archiva-common/src/test/repository/invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/invalid/invalid/1.0/invalid-1.0b.jar [new file with mode: 0644]
archiva-common/src/test/repository/invalid/invalid/1.0/invalid-2.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/invalid/invalid/1/invalid-1 [new file with mode: 0644]
archiva-common/src/test/repository/javax/maven-metadata.xml [new file with mode: 0644]
archiva-common/src/test/repository/javax/sql/jdbc/2.0/jdbc-2.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/javax/sql/jdbc/2.0/maven-metadata-repository.xml [new file with mode: 0644]
archiva-common/src/test/repository/javax/sql/jdbc/maven-metadata-repository.xml [new file with mode: 0644]
archiva-common/src/test/repository/javax/sql/maven-metadata-repository.xml [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.war [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/B/1.0/B-1.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/B/2.0/B-2.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.war [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/discovery/1.0/discovery-1.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/maven-metadata.xml [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/maven-metadata.xml [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-sources.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-test-sources.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.tar.gz [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.zip [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/maven-metadata.xml [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/maven/update/test-updated/maven-metadata.xml [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/discovery-1.0.pom [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/maven-metadata.xml [new file with mode: 0644]
archiva-common/src/test/repository/org/apache/testgroup/discovery/maven-metadata.xml [new file with mode: 0644]
archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.xml [new file with mode: 0644]
archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericModelConsumerTest.xml [new file with mode: 0644]
archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumerTest.xml [new file with mode: 0644]
archiva-configuration/src/main/mdo/configuration.mdo
archiva-configuration/src/test/java/org/apache/maven/archiva/configuration/ArchivaConfigurationTest.java
archiva-converter/pom.xml
archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionEvent.java [new file with mode: 0644]
archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionListener.java [new file with mode: 0644]
archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java
archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java
archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java [new file with mode: 0644]
archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java [new file with mode: 0644]
archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java [new file with mode: 0644]
archiva-converter/src/test/java/org/apache/maven/archiva/converter/AllTests.java [new file with mode: 0644]
archiva-converter/src/test/java/org/apache/maven/archiva/converter/MockConversionListener.java [new file with mode: 0644]
archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java
archiva-converter/src/test/java/org/apache/maven/archiva/converter/transaction/AllTests.java [new file with mode: 0644]
archiva-converter/src/test/resources/log4j.properties [new file with mode: 0644]
archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml
archiva-core/pom.xml
archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifact.java [deleted file]
archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifactTypes.java [deleted file]
archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedEjbArtifact.java [deleted file]
archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedJavaArtifact.java [deleted file]
archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java [new file with mode: 0644]
archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java [new file with mode: 0644]
archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java [new file with mode: 0644]
archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java [deleted file]
archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java [deleted file]
archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java
archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskScheduler.java
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/TaskExecutionException.java [deleted file]
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshConsumers.java [new file with mode: 0644]
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java [new file with mode: 0644]
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java [deleted file]
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java [new file with mode: 0644]
archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java [deleted file]
archiva-core/src/main/resources/META-INF/plexus/components.xml
archiva-core/src/test/java/org/apache/maven/archiva/AllTests.java [new file with mode: 0644]
archiva-core/src/test/java/org/apache/maven/archiva/LegacyRepositoryConverterTest.java [deleted file]
archiva-core/src/test/java/org/apache/maven/archiva/repositories/AllTests.java [new file with mode: 0644]
archiva-core/src/test/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositoriesTest.java
archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/AllTests.java [new file with mode: 0644]
archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java [new file with mode: 0644]
archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java [deleted file]
archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.xml [new file with mode: 0644]
archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.xml [deleted file]
archiva-discoverer/pom.xml
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/ArtifactDiscoverer.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java [new file with mode: 0644]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/Discoverer.java
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java [new file with mode: 0644]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java [new file with mode: 0644]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java [deleted file]
archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java [deleted file]
archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java [deleted file]
archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java [new file with mode: 0644]
archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AllTests.java [new file with mode: 0644]
archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java [deleted file]
archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultDiscovererTest.java [new file with mode: 0644]
archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java [deleted file]
archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java [deleted file]
archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/MockConsumer.java [new file with mode: 0644]
archiva-discoverer/src/test/repository/javax/maven-metadata.xml
archiva-indexer/pom.xml
archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryArtifactIndex.java
archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryArtifactIndex.java
archiva-proxy/pom.xml
archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultProxyRequestHandler.java
archiva-proxy/src/test/resources/org/apache/maven/archiva/proxy/ProxyRequestHandlerTest.xml
archiva-reports-standard/pom.xml
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java [deleted file]
archiva-reports-standard/src/main/mdo/reporting.mdo
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/AbstractRepositoryReportsTestCase.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/AllTests.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/AllTests.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabaseTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabaseTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/ReportingDatabaseTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/AllTests.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessorTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessorTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessorTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessorTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/AllTests.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumArtifactReporterTest.java
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumMetadataReporterTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/DefaultArtifactReporterTest.java
archiva-reports-standard/src/test/resources/META-INF/plexus/components.xml [new file with mode: 0644]
archiva-reports-standard/src/test/resources/log4j.properties [new file with mode: 0644]
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.xml
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.xml
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.xml
archiva-security/pom.xml
archiva-site/src/site/resources/images/graph-multimodule.dot [new file with mode: 0644]
archiva-site/src/site/resources/images/graph-multimodule.png [new file with mode: 0644]
archiva-webapp/pom.xml
archiva-webapp/src/jetty-env.xml
archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ReportsAction.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ShowArtifactAction.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/ConfigureAction.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/RunRepositoryTaskAction.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/AuditLog.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/ProxiedDavServer.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/RepositoryServlet.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifact.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifactTag.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/ExpressionTool.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLink.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLinkTag.java
archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/PlexusTagUtil.java
archiva-webapp/src/main/resources/META-INF/plexus/application.xml
archiva-webapp/src/main/resources/log4j.xml [new file with mode: 0644]
archiva-webapp/src/main/resources/xwork.xml
archiva-webapp/src/main/webapp/WEB-INF/jsp/include/artifactReports.jspf [new file with mode: 0644]
archiva-webapp/src/main/webapp/WEB-INF/jsp/reports/reports.jsp
archiva-webapp/src/main/webapp/WEB-INF/jsp/showArtifact.jsp
pom.xml

index 9a25de821cf83b9909b797286b6414f829d976c1..17a2b9e5947b878490c51164de6fea97c589b5a5 100644 (file)
@@ -30,7 +30,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-core</artifactId>
+      <artifactId>archiva-converter</artifactId>
     </dependency>
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
index f9ba5cc9e6ba2e3051a9dd8041e24b05db50f1f8..26ee4db77c7cc6a780e022013cce79cefbc812f1 100644 (file)
@@ -23,9 +23,8 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.conversion.LegacyRepositoryConverter;
 import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
 import org.codehaus.plexus.PlexusContainer;
 import org.codehaus.plexus.tools.cli.AbstractCli;
 
@@ -103,28 +102,25 @@ public class ArchivaCli
 
             System.out.println( "Converting " + oldRepositoryPath + " to " + newRepositoryPath );
 
-            List blacklistedPatterns = null;
+            List fileExclusionPatterns = null;
 
             String s = p.getProperty( BLACKLISTED_PATTERNS );
 
             if ( s != null )
             {
-                blacklistedPatterns = Arrays.asList( StringUtils.split( s, "," ) );
+                fileExclusionPatterns = Arrays.asList( StringUtils.split( s, "," ) );
             }
 
             try
             {
                 legacyRepositoryConverter.convertLegacyRepository( oldRepositoryPath, newRepositoryPath,
-                                                                   blacklistedPatterns, true );
+                                                                   fileExclusionPatterns,
+                                                                   true );
             }
             catch ( RepositoryConversionException e )
             {
                 showFatalError( "Error converting repository.", e, true );
             }
-            catch ( DiscovererException e )
-            {
-                showFatalError( "Error discovery artifacts to convert.", e, true );
-            }
         }
     }
 }
diff --git a/archiva-common/pom.xml b/archiva-common/pom.xml
new file mode 100644 (file)
index 0000000..47d3c62
--- /dev/null
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <parent>
+    <artifactId>archiva</artifactId>
+    <groupId>org.apache.maven.archiva</groupId>
+    <version>1.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>archiva-common</artifactId>
+  <name>Archiva Common</name>
+  <dependencies>
+    <!-- TO OTHER DEVELOPERS:
+         This module should depend on NO OTHER ARCHIVA MODULES.
+         If you feel tempted to add one, discuss it first in the 
+         archiva-dev@maven.apache.org mailing-list.
+            joakime@apache.org
+      -->
+    <dependency>
+      <groupId>org.codehaus.plexus</groupId>
+      <artifactId>plexus-component-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven</groupId>
+      <artifactId>maven-artifact-manager</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven</groupId>
+      <artifactId>maven-project</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.plexus</groupId>
+      <artifactId>plexus-container-default</artifactId>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <!--
+      <plugin>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>test-jar</id>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+       -->
+      <plugin>
+        <groupId>org.codehaus.plexus</groupId>
+        <artifactId>plexus-maven-plugin</artifactId>
+        <!--
+        <executions>
+          <execution>
+            <id>merge</id>
+            <goals>
+              <goal>merge-descriptors</goal>
+            </goals>
+            <configuration>
+              <descriptors>
+                <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+                <descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
+              </descriptors>
+            </configuration>
+          </execution>
+        </executions>
+         -->
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/ArchivaException.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/ArchivaException.java
new file mode 100644 (file)
index 0000000..c807d70
--- /dev/null
@@ -0,0 +1,40 @@
+package org.apache.maven.archiva.common;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ArchivaException 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArchivaException
+    extends Exception
+{
+    public ArchivaException( String message, Throwable cause )
+    {
+        super( message, cause );
+    }
+
+    public ArchivaException( String message )
+    {
+        super( message );
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilder.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilder.java
new file mode 100644 (file)
index 0000000..b77826a
--- /dev/null
@@ -0,0 +1,55 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+/**
+ * AbstractLayoutArtifactBuilder 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractLayoutArtifactBuilder
+    implements LayoutArtifactBuilder
+{
+    /**
+     * @plexus.requirement
+     */
+    protected ArtifactFactory artifactFactory;
+
+    /**
+     * Constructor used by plexus
+     */
+    public AbstractLayoutArtifactBuilder()
+    {
+
+    }
+
+    /**
+     * Constructor used by manual process.
+     * 
+     * @param artifactFactory the artifact factory to use.
+     */
+    public AbstractLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+    {
+        this.artifactFactory = artifactFactory;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/BuilderException.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/BuilderException.java
new file mode 100644 (file)
index 0000000..0845dc7
--- /dev/null
@@ -0,0 +1,43 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.ArchivaException;
+
+/**
+ * BuilderException - used to indicate a problem during the building of an object from file. 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class BuilderException
+    extends ArchivaException
+{
+
+    public BuilderException( String message, Throwable cause )
+    {
+        super( message, cause );
+    }
+
+    public BuilderException( String message )
+    {
+        super( message );
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilder.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilder.java
new file mode 100644 (file)
index 0000000..bfee015
--- /dev/null
@@ -0,0 +1,218 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * DefaultLayoutArtifactBuilder - artifact builder for default layout repositories. 
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder"
+ *     role-hint="default"
+ */
+public class DefaultLayoutArtifactBuilder
+    extends AbstractLayoutArtifactBuilder
+    implements LayoutArtifactBuilder
+{
+    public DefaultLayoutArtifactBuilder()
+    {
+        super();
+    }
+
+    public DefaultLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+    {
+        super( artifactFactory );
+    }
+
+    public Artifact build( String pathToArtifact )
+        throws BuilderException
+    {
+        if( artifactFactory == null )
+        {
+            throw new IllegalStateException( "Unable to build artifact with a null artifactFactory." );
+        }
+        
+        List pathParts = new ArrayList();
+        StringTokenizer st = new StringTokenizer( pathToArtifact, "/\\" );
+        while ( st.hasMoreTokens() )
+        {
+            pathParts.add( st.nextToken() );
+        }
+
+        Collections.reverse( pathParts );
+
+        Artifact artifact;
+        if ( pathParts.size() >= 4 )
+        {
+            // maven 2.x path
+
+            // the actual artifact filename.
+            String filename = (String) pathParts.remove( 0 );
+
+            // the next one is the version.
+            String version = (String) pathParts.remove( 0 );
+
+            // the next one is the artifactId.
+            String artifactId = (String) pathParts.remove( 0 );
+
+            // the remaining are the groupId.
+            Collections.reverse( pathParts );
+            String groupId = StringUtils.join( pathParts.iterator(), "." );
+
+            String remainingFilename = filename;
+            if ( remainingFilename.startsWith( artifactId + "-" ) )
+            {
+                remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
+
+                String classifier = null;
+
+                // TODO: use artifact handler, share with legacy discoverer
+                String type;
+                if ( remainingFilename.endsWith( ".tar.gz" ) )
+                {
+                    type = "distribution-tgz";
+                    remainingFilename = remainingFilename
+                        .substring( 0, remainingFilename.length() - ".tar.gz".length() );
+                }
+                else if ( remainingFilename.endsWith( ".zip" ) )
+                {
+                    type = "distribution-zip";
+                    remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
+                }
+                else if ( remainingFilename.endsWith( "-test-sources.jar" ) )
+                {
+                    type = "java-source";
+                    classifier = "test-sources";
+                    remainingFilename = remainingFilename.substring( 0, remainingFilename.length()
+                        - "-test-sources.jar".length() );
+                }
+                else if ( remainingFilename.endsWith( "-sources.jar" ) )
+                {
+                    type = "java-source";
+                    classifier = "sources";
+                    remainingFilename = remainingFilename.substring( 0, remainingFilename.length()
+                        - "-sources.jar".length() );
+                }
+                else
+                {
+                    int index = remainingFilename.lastIndexOf( "." );
+                    if ( index >= 0 )
+                    {
+                        type = remainingFilename.substring( index + 1 );
+                        remainingFilename = remainingFilename.substring( 0, index );
+                    }
+                    else
+                    {
+                        throw new BuilderException( "Path filename does not have an extension." );
+                    }
+                }
+
+                Artifact result;
+                if ( classifier == null )
+                {
+                    result = artifactFactory
+                        .createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
+                }
+                else
+                {
+                    result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+                                                                           classifier );
+                }
+
+                if ( result.isSnapshot() )
+                {
+                    // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
+                    int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
+                    if ( classifierIndex >= 0 )
+                    {
+                        classifier = remainingFilename.substring( classifierIndex + 1 );
+                        remainingFilename = remainingFilename.substring( 0, classifierIndex );
+                        result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
+                                                                               type, classifier );
+                    }
+                    else
+                    {
+                        result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
+                                                                 Artifact.SCOPE_RUNTIME, type );
+                    }
+
+                    // poor encapsulation requires we do this to populate base version
+                    if ( !result.isSnapshot() )
+                    {
+                        throw new BuilderException( "Failed to create a snapshot artifact: " + result );
+                    }
+                    else if ( !result.getBaseVersion().equals( version ) )
+                    {
+                        throw new BuilderException(
+                                                    "Built snapshot artifact base version does not match path version: "
+                                                        + result.getBaseVersion() + "; should have been version: "
+                                                        + version );
+                    }
+                    else
+                    {
+                        artifact = result;
+                    }
+                }
+                else if ( !remainingFilename.startsWith( version ) )
+                {
+                    throw new BuilderException( "Built artifact version does not match path version" );
+                }
+                else if ( !remainingFilename.equals( version ) )
+                {
+                    if ( remainingFilename.charAt( version.length() ) == '-' )
+                    {
+                        classifier = remainingFilename.substring( version.length() + 1 );
+                        artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+                                                                                 classifier );
+                    }
+                    else
+                    {
+                        throw new BuilderException( "Path version does not correspond to an artifact version" );
+                    }
+                }
+                else
+                {
+                    artifact = result;
+                }
+            }
+            else
+            {
+                throw new BuilderException( "Path filename does not correspond to an artifact." );
+            }
+        }
+        else
+        {
+            throw new BuilderException( "Path is too short to build an artifact from." );
+        }
+
+        return artifact;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LayoutArtifactBuilder.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LayoutArtifactBuilder.java
new file mode 100644 (file)
index 0000000..494a4a7
--- /dev/null
@@ -0,0 +1,36 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+/**
+ * LayoutArtifactBuilder 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @todo this concept should really exist inside of the {@link ArtifactRepositoryLayout} object in maven itself.
+ */
+public interface LayoutArtifactBuilder
+{
+    public Artifact build( String pathToArtifact ) throws BuilderException;
+}
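
As an aside (not part of this commit), here is a minimal usage sketch of the LayoutArtifactBuilder API added above. It assumes an ArtifactFactory instance is already available to the caller (normally injected by Plexus, as in the builders themselves); the class and method names below are purely illustrative.

    // Illustrative sketch only; not part of this commit.
    import org.apache.maven.archiva.common.artifact.builder.BuilderException;
    import org.apache.maven.archiva.common.artifact.builder.DefaultLayoutArtifactBuilder;
    import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder;
    import org.apache.maven.archiva.common.artifact.builder.LegacyLayoutArtifactBuilder;
    import org.apache.maven.artifact.Artifact;
    import org.apache.maven.artifact.factory.ArtifactFactory;

    public class LayoutArtifactBuilderSketch
    {
        // artifactFactory is assumed to be supplied by the caller
        // (normally injected by Plexus via @plexus.requirement).
        public static Artifact parse( ArtifactFactory artifactFactory, String path, boolean legacyLayout )
            throws BuilderException
        {
            LayoutArtifactBuilder builder;
            if ( legacyLayout )
            {
                // legacy (Maven 1) layout, e.g. "org.apache.maven/jars/samplejar-1.0.jar"
                builder = new LegacyLayoutArtifactBuilder( artifactFactory );
            }
            else
            {
                // default (Maven 2) layout, e.g. "org/apache/maven/samplejar/1.0/samplejar-1.0.jar"
                builder = new DefaultLayoutArtifactBuilder( artifactFactory );
            }
            return builder.build( path );
        }
    }
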
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilder.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilder.java
new file mode 100644 (file)
index 0000000..e3436e9
--- /dev/null
@@ -0,0 +1,303 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.StringTokenizer;
+
+/**
+ * LegacyLayoutArtifactBuilder 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ *  @plexus.component role="org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder"
+ *     role-hint="legacy"
+ */
+public class LegacyLayoutArtifactBuilder
+    extends AbstractLayoutArtifactBuilder
+    implements LayoutArtifactBuilder
+{
+    public LegacyLayoutArtifactBuilder()
+    {
+        super();
+    }
+
+    public LegacyLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+    {
+        super( artifactFactory );
+    }
+
+    public Artifact build( String pathToArtifact )
+        throws BuilderException
+    {
+        if( artifactFactory == null )
+        {
+            throw new IllegalStateException( "Unable to build legacy artifact with a null artifactFactory." );
+        }
+        
+        StringTokenizer tokens = new StringTokenizer( pathToArtifact, "/\\" );
+
+        Artifact result;
+
+        int numberOfTokens = tokens.countTokens();
+
+        if ( numberOfTokens == 3 )
+        {
+            String groupId = tokens.nextToken();
+
+            String type = tokens.nextToken();
+
+            if ( type.endsWith( "s" ) )
+            {
+                type = type.substring( 0, type.length() - 1 );
+
+                // contains artifactId, version, classifier, and extension.
+                String avceGlob = tokens.nextToken();
+
+                //noinspection CollectionDeclaredAsConcreteClass
+                LinkedList avceTokenList = new LinkedList();
+
+                StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
+                while ( avceTokenizer.hasMoreTokens() )
+                {
+                    avceTokenList.addLast( avceTokenizer.nextToken() );
+                }
+
+                String lastAvceToken = (String) avceTokenList.removeLast();
+
+                // TODO: share with other discoverer, use artifact handlers instead
+                if ( lastAvceToken.endsWith( ".tar.gz" ) )
+                {
+                    type = "distribution-tgz";
+
+                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
+
+                    avceTokenList.addLast( lastAvceToken );
+                }
+                else if ( lastAvceToken.endsWith( "sources.jar" ) )
+                {
+                    type = "java-source";
+
+                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+                    avceTokenList.addLast( lastAvceToken );
+                }
+                else if ( lastAvceToken.endsWith( "javadoc.jar" ) )
+                {
+                    type = "javadoc.jar";
+
+                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+                    avceTokenList.addLast( lastAvceToken );
+                }
+                else if ( lastAvceToken.endsWith( ".zip" ) )
+                {
+                    type = "distribution-zip";
+
+                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
+
+                    avceTokenList.addLast( lastAvceToken );
+                }
+                else
+                {
+                    int extPos = lastAvceToken.lastIndexOf( '.' );
+
+                    if ( extPos > 0 )
+                    {
+                        String ext = lastAvceToken.substring( extPos + 1 );
+                        if ( type.equals( ext ) || "plugin".equals( type ) )
+                        {
+                            lastAvceToken = lastAvceToken.substring( 0, extPos );
+
+                            avceTokenList.addLast( lastAvceToken );
+                        }
+                        else
+                        {
+                            throw new BuilderException( "Path type does not match the extension" );
+                        }
+                    }
+                    else
+                    {
+                        throw new BuilderException( "Path filename does not have an extension" );
+                    }
+                }
+
+                // let's discover the version, and whatever's leftover will be either
+                // a classifier, or part of the artifactId, depending on position.
+                // Since version is at the end, we have to move in from the back.
+                Collections.reverse( avceTokenList );
+
+                // TODO: this is obscene - surely a better way?
+                String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|"
+                    + "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|"
+                    + "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|"
+                    + "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|"
+                    + "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|"
+                    + "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|"
+                    + "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
+
+                StringBuffer classifierBuffer = new StringBuffer();
+                StringBuffer versionBuffer = new StringBuffer();
+
+                boolean firstVersionTokenEncountered = false;
+                boolean firstToken = true;
+
+                int tokensIterated = 0;
+                for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+                {
+                    String token = (String) it.next();
+
+                    boolean tokenIsVersionPart = token.matches( validVersionParts );
+
+                    StringBuffer bufferToUpdate;
+
+                    // NOTE: logic in code is reversed, since we're peeling off the back
+                    // Any token after the last versionPart will be in the classifier.
+                    // Any token UP TO first non-versionPart is part of the version.
+                    if ( !tokenIsVersionPart )
+                    {
+                        if ( firstVersionTokenEncountered )
+                        {
+                            //noinspection BreakStatement
+                            break;
+                        }
+                        else
+                        {
+                            bufferToUpdate = classifierBuffer;
+                        }
+                    }
+                    else
+                    {
+                        firstVersionTokenEncountered = true;
+
+                        bufferToUpdate = versionBuffer;
+                    }
+
+                    if ( firstToken )
+                    {
+                        firstToken = false;
+                    }
+                    else
+                    {
+                        bufferToUpdate.insert( 0, '-' );
+                    }
+
+                    bufferToUpdate.insert( 0, token );
+
+                    tokensIterated++;
+                }
+
+                // Now, restore the proper ordering so we can build the artifactId.
+                Collections.reverse( avceTokenList );
+
+                // if we didn't find a version, then punt. Use the last token
+                // as the version, and set the classifier empty.
+                if ( versionBuffer.length() < 1 )
+                {
+                    if ( avceTokenList.size() > 1 )
+                    {
+                        int lastIdx = avceTokenList.size() - 1;
+
+                        versionBuffer.append( avceTokenList.get( lastIdx ) );
+                        avceTokenList.remove( lastIdx );
+                    }
+
+                    classifierBuffer.setLength( 0 );
+                }
+                else
+                {
+                    // if everything is kosher, then pop off all the classifier and
+                    // version tokens, leaving the naked artifact id in the list.
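+                    // Continuing the illustration above: with [some, ejb, 1.0, client] and two
+                    // tokens consumed for version/classifier, [some, ejb] remains for the artifactId.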
+                    avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
+                }
+
+                StringBuffer artifactIdBuffer = new StringBuffer();
+
+                firstToken = true;
+                for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+                {
+                    String token = (String) it.next();
+
+                    if ( firstToken )
+                    {
+                        firstToken = false;
+                    }
+                    else
+                    {
+                        artifactIdBuffer.append( '-' );
+                    }
+
+                    artifactIdBuffer.append( token );
+                }
+
+                String artifactId = artifactIdBuffer.toString();
+
+                if ( artifactId.length() > 0 )
+                {
+                    int lastVersionCharIdx = versionBuffer.length() - 1;
+                    if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
+                    {
+                        versionBuffer.setLength( lastVersionCharIdx );
+                    }
+
+                    String version = versionBuffer.toString();
+
+                    if ( version.length() > 0 )
+                    {
+                        if ( classifierBuffer.length() > 0 )
+                        {
+                            result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+                                                                                   classifierBuffer.toString() );
+                        }
+                        else
+                        {
+                            result = artifactFactory.createArtifact( groupId, artifactId, version,
+                                                                     Artifact.SCOPE_RUNTIME, type );
+                        }
+                    }
+                    else
+                    {
+                        throw new BuilderException( "Path filename version is empty" );
+                    }
+                }
+                else
+                {
+                    throw new BuilderException( "Path filename artifactId is empty" );
+                }
+            }
+            else
+            {
+                throw new BuilderException( "Path artifact type does not correspond to an artifact type" );
+            }
+        }
+        else
+        {
+            throw new BuilderException( "Path does not match a legacy repository path for an artifact" );
+        }
+
+        return result;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifact.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifact.java
new file mode 100644 (file)
index 0000000..8e3c678
--- /dev/null
@@ -0,0 +1,76 @@
+package org.apache.maven.archiva.common.artifact.managed;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * ManagedArtifact - an Artifact together with its repository id, path, and any attached artifacts.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ManagedArtifact
+{
+    private String repositoryId;
+
+    private Artifact artifact;
+
+    private String path;
+
+    protected Map attached;
+
+    public ManagedArtifact( String repoId, Artifact artifact, String path )
+    {
+        super();
+        this.repositoryId = repoId;
+        this.artifact = artifact;
+        this.path = path;
+        this.attached = new HashMap();
+    }
+
+    public Artifact getArtifact()
+    {
+        return artifact;
+    }
+
+    public String getPath()
+    {
+        return path;
+    }
+
+    public String getRepositoryId()
+    {
+        return repositoryId;
+    }
+
+    public Map getAttached()
+    {
+        return attached;
+    }
+
+    public void setAttached( Map attached )
+    {
+        this.attached = attached;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifactTypes.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifactTypes.java
new file mode 100644 (file)
index 0000000..b653d16
--- /dev/null
@@ -0,0 +1,81 @@
+package org.apache.maven.archiva.common.artifact.managed;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * ManagedArtifactTypes - classifies an artifact type string as {@link #GENERIC}, {@link #JAVA}, or {@link #EJB}.
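+ *
+ * For example, <code>whichType( "ejb-client" )</code> returns {@link #EJB},
+ * <code>whichType( "war" )</code> returns {@link #JAVA}, and any unrecognized
+ * type falls back to {@link #GENERIC}.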
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ManagedArtifactTypes
+{
+    public static final int GENERIC = 0;
+
+    public static final int JAVA = 1;
+
+    public static final int EJB = 2;
+
+    private static List javaArtifacts;
+
+    private static List ejbArtifacts;
+
+    static
+    {
+        javaArtifacts = new ArrayList();
+        javaArtifacts.add( "jar" );
+        javaArtifacts.add( "war" );
+        javaArtifacts.add( "sar" );
+        javaArtifacts.add( "rar" );
+        javaArtifacts.add( "ear" );
+
+        ejbArtifacts = new ArrayList();
+        ejbArtifacts.add( "ejb" );
+        ejbArtifacts.add( "ejb-client" );
+    }
+
+    public static int whichType( String type )
+    {
+        if ( StringUtils.isBlank( type ) )
+        {
+            // TODO: is an empty type even possible?
+            return GENERIC;
+        }
+
+        type = type.toLowerCase();
+
+        if ( ejbArtifacts.contains( type ) )
+        {
+            return EJB;
+        }
+
+        if ( javaArtifacts.contains( type ) )
+        {
+            return JAVA;
+        }
+
+        return GENERIC;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedEjbArtifact.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedEjbArtifact.java
new file mode 100644 (file)
index 0000000..1759df2
--- /dev/null
@@ -0,0 +1,49 @@
+package org.apache.maven.archiva.common.artifact.managed;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * ManagedEjbArtifact - a {@link ManagedJavaArtifact} that can also reference the attached ejb-client jar.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ManagedEjbArtifact
+    extends ManagedJavaArtifact
+{
+    public static final String CLIENT = "client";
+
+    public ManagedEjbArtifact( String repoId, Artifact artifact, String path )
+    {
+        super( repoId, artifact, path );
+    }
+
+    public String getClientPath()
+    {
+        return (String) super.attached.get( CLIENT );
+    }
+
+    public void setClientPath( String clientPath )
+    {
+        super.attached.put( CLIENT, clientPath );
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedJavaArtifact.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedJavaArtifact.java
new file mode 100644 (file)
index 0000000..203234b
--- /dev/null
@@ -0,0 +1,62 @@
+package org.apache.maven.archiva.common.artifact.managed;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * ManagedJavaArtifact - a ManagedArtifact with optional javadoc and source 
+ * reference jars.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ManagedJavaArtifact
+    extends ManagedArtifact
+{
+    public static final String JAVADOC = "javadoc";
+
+    public static final String SOURCES = "sources";
+
+    public ManagedJavaArtifact( String repoId, Artifact artifact, String path )
+    {
+        super( repoId, artifact, path );
+    }
+
+    public String getJavadocPath()
+    {
+        return (String) super.attached.get( JAVADOC );
+    }
+
+    public void setJavadocPath( String javadocPath )
+    {
+        super.attached.put( JAVADOC, javadocPath );
+    }
+
+    public String getSourcesPath()
+    {
+        return (String) super.attached.get( SOURCES );
+    }
+
+    public void setSourcesPath( String sourcesPath )
+    {
+        super.attached.put( SOURCES, sourcesPath );
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/AbstractConsumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/AbstractConsumer.java
new file mode 100644 (file)
index 0000000..6034346
--- /dev/null
@@ -0,0 +1,66 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * AbstractConsumer - common base class for {@link Consumer} implementations.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractConsumer
+    extends AbstractLogEnabled
+    implements Consumer
+{
+    /**
+     * @plexus.requirement
+     */
+    protected ArtifactFactory artifactFactory;
+    
+    protected ArtifactRepository repository;
+    
+    protected AbstractConsumer()
+    {
+        /* do nothing */
+    }
+
+    public List getExcludePatterns()
+    {
+        return Collections.EMPTY_LIST;
+    }
+
+    public boolean init( ArtifactRepository repository )
+    {
+        this.repository = repository;
+        return isEnabled();
+    }
+    
+    protected boolean isEnabled()
+    {
+        return true;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/Consumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/Consumer.java
new file mode 100644 (file)
index 0000000..fad6f2f
--- /dev/null
@@ -0,0 +1,90 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.List;
+
+/**
+ * Consumer - a component that consumes files discovered in a repository.
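+ *
+ * <p>
+ * A minimal consumer might look like the following sketch (imports omitted; the class
+ * name and counting behaviour are purely illustrative):
+ * </p>
+ * <pre>
+ * public class PomCountingConsumer
+ *     extends AbstractConsumer
+ * {
+ *     private int count;
+ *
+ *     public String getName()
+ *     {
+ *         return "pom-counter";
+ *     }
+ *
+ *     public List getIncludePatterns()
+ *     {
+ *         return Collections.singletonList( "**&#47;*.pom" );
+ *     }
+ *
+ *     public void processFile( BaseFile file )
+ *     {
+ *         count++;
+ *     }
+ *
+ *     public void processFileProblem( BaseFile file, String message )
+ *     {
+ *         getLogger().warn( message ); // log and continue
+ *     }
+ * }
+ * </pre>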
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public interface Consumer
+{
+    public static final String ROLE = Consumer.class.getName();
+
+    /**
+     * This is the human-readable name for the consumer.
+     * 
+     * @return the human-readable consumer name.
+     */
+    public String getName();
+    
+    /**
+     * This is used to initialize any internals in the consumer before it is used.
+     * 
+     * This method is called by the internals of archiva and is not meant to be used by other developers.
+     * This method is called once per repository.
+     * 
+     * @param repository the repository to initialize the consumer against.
+     * @return true if the repository is valid for this consumer; false results in the consumer being disabled
+     *      for the provided repository.
+     */
+    public boolean init( ArtifactRepository repository );
+    
+    /**
+     * Get the List of excluded file patterns for this consumer.
+     * 
+     * @return the list of excluded file patterns for this consumer.
+     */
+    public List getExcludePatterns();
+    
+    /**
+     * Get the List of included file patterns for this consumer.
+     * 
+     * @return the list of included file patterns for this consumer.
+     */
+    public List getIncludePatterns();
+
+    /**
+     * Called by the archiva framework to indicate that there is a file suitable for consuming.
+     * This method will only be called if {@link #init(ArtifactRepository)} returned true and the file
+     * matches {@link #getIncludePatterns()} without being excluded by {@link #getExcludePatterns()}.
+     * 
+     * @param file the file to process.
+     * @throws ConsumerException if there was a problem processing this file.
+     */
+    public void processFile( BaseFile file ) throws ConsumerException;
+    
+    /**
+     * Called by the archiva framework to indicate that a problem has been detected
+     * on a specific file.
+     * 
+     * NOTE: a single file can have more than one problem associated with it.
+     * 
+     * @param file the file to process.
+     * @param message the message describing the problem.
+     */
+    public void processFileProblem( BaseFile file, String message );
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerException.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerException.java
new file mode 100644 (file)
index 0000000..0c4c645
--- /dev/null
@@ -0,0 +1,52 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.ArchivaException;
+import org.apache.maven.archiva.common.utils.BaseFile;
+
+/**
+ * ConsumerException - details about the failure of a consumer.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ConsumerException
+    extends ArchivaException
+{
+    private BaseFile file;
+
+    public ConsumerException( BaseFile file, String message, Throwable cause )
+    {
+        super( message, cause );
+        this.file = file;
+    }
+
+    public ConsumerException( BaseFile file, String message )
+    {
+        super( message );
+        this.file = file;
+    }
+
+    public BaseFile getFile()
+    {
+        return file;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerFactory.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerFactory.java
new file mode 100644 (file)
index 0000000..2b2343c
--- /dev/null
@@ -0,0 +1,70 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.codehaus.plexus.PlexusConstants;
+import org.codehaus.plexus.PlexusContainer;
+import org.codehaus.plexus.context.Context;
+import org.codehaus.plexus.context.ContextException;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable;
+
+/**
+ * ConsumerFactory - factory for creating {@link Consumer} instances.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.ConsumerFactory"
+ */
+public class ConsumerFactory
+    extends AbstractLogEnabled
+    implements Contextualizable
+{
+    public static final String ROLE = ConsumerFactory.class.getName();
+
+    private PlexusContainer container;
+
+    public Consumer createConsumer( String name )
+        throws ConsumerException
+    {
+        getLogger().info( "Attempting to create consumer [" + name + "]" );
+
+        Consumer consumer;
+        try
+        {
+            consumer = (Consumer) container.lookup( Consumer.ROLE, container.getLookupRealm() );
+        }
+        catch ( Throwable t )
+        {
+            String emsg = "Unable to create consumer [" + name + "]: " + t.getMessage();
+            getLogger().warn( t.getMessage(), t );
+            throw new ConsumerException( null, emsg, t );
+        }
+
+        getLogger().info( "Created consumer [" + name + "|" + consumer.getName() + "]" );
+        return consumer;
+    }
+
+    public void contextualize( Context context )
+        throws ContextException
+    {
+        container = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY );
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumer.java
new file mode 100644 (file)
index 0000000..c9e5437
--- /dev/null
@@ -0,0 +1,130 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.artifact.builder.BuilderException;
+import org.apache.maven.archiva.common.artifact.builder.DefaultLayoutArtifactBuilder;
+import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder;
+import org.apache.maven.archiva.common.artifact.builder.LegacyLayoutArtifactBuilder;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * GenericArtifactConsumer - base consumer that builds {@link Artifact} objects from discovered files.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericArtifactConsumer
+    extends AbstractConsumer
+    implements Consumer
+{
+    public abstract void processArtifact( Artifact artifact, BaseFile file );
+
+    private Map artifactBuilders = new HashMap();
+
+    private static final List includePatterns;
+
+    static
+    {
+        includePatterns = new ArrayList();
+        includePatterns.add( "**/*.pom" );
+        includePatterns.add( "**/*.jar" );
+        includePatterns.add( "**/*.war" );
+        includePatterns.add( "**/*.ear" );
+        includePatterns.add( "**/*.sar" );
+        includePatterns.add( "**/*.zip" );
+        includePatterns.add( "**/*.gz" );
+        includePatterns.add( "**/*.bz2" );
+    }
+
+    private String layoutId = "default";
+
+    public boolean init( ArtifactRepository repository )
+    {
+        this.artifactBuilders.clear();
+        this.artifactBuilders.put( "default", new DefaultLayoutArtifactBuilder( artifactFactory ) );
+        this.artifactBuilders.put( "legacy", new LegacyLayoutArtifactBuilder( artifactFactory ) );
+
+        if ( repository.getLayout() instanceof LegacyRepositoryLayout )
+        {
+            this.layoutId = "legacy";
+        }
+
+        return super.init( repository );
+    }
+
+    public List getIncludePatterns()
+    {
+        return includePatterns;
+    }
+
+    public boolean isEnabled()
+    {
+        ArtifactRepositoryLayout layout = repository.getLayout();
+        return ( layout instanceof DefaultRepositoryLayout ) || ( layout instanceof LegacyRepositoryLayout );
+    }
+
+    public void processFile( BaseFile file )
+        throws ConsumerException
+    {
+        if ( file.length() <= 0 )
+        {
+            processFileProblem( file, "File is empty." );
+        }
+
+        if ( !file.canRead() )
+        {
+            processFileProblem( file, "Not allowed to read file due to permission settings on file." );
+        }
+
+        try
+        {
+            Artifact artifact = buildArtifact( file );
+
+            processArtifact( artifact, file );
+        }
+        catch ( BuilderException e )
+        {
+            throw new ConsumerException( file, e.getMessage(), e );
+        }
+    }
+
+    private Artifact buildArtifact( BaseFile file )
+        throws BuilderException
+    {
+        LayoutArtifactBuilder builder = (LayoutArtifactBuilder) artifactBuilders.get( layoutId );
+
+        Artifact artifact = builder.build( file.getRelativePath() );
+        artifact.setRepository( repository );
+        artifact.setFile( file );
+
+        return artifact;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericModelConsumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericModelConsumer.java
new file mode 100644 (file)
index 0000000..efcd7af
--- /dev/null
@@ -0,0 +1,98 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * GenericModelConsumer - consumer for pom files.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericModelConsumer
+    extends AbstractConsumer
+    implements Consumer
+{
+    public abstract void processModel( Model model, BaseFile file );
+
+    private static final List includePatterns;
+
+    static
+    {
+        includePatterns = new ArrayList();
+        includePatterns.add( "**/*.pom" );
+    }
+
+    public List getIncludePatterns()
+    {
+        return includePatterns;
+    }
+
+    public boolean isEnabled()
+    {
+        return true;
+    }
+
+    public void processFile( BaseFile file )
+        throws ConsumerException
+    {
+        Model model = buildModel( file );
+        processModel( model, file );
+    }
+
+    private Model buildModel( BaseFile file )
+        throws ConsumerException
+    {
+        Model model;
+        Reader reader = null;
+        try
+        {
+            reader = new FileReader( file );
+            MavenXpp3Reader modelReader = new MavenXpp3Reader();
+
+            model = modelReader.read( reader );
+        }
+        catch ( XmlPullParserException e )
+        {
+            throw new ConsumerException( file, "Error parsing metadata file: " + e.getMessage(), e );
+        }
+        catch ( IOException e )
+        {
+            throw new ConsumerException( file, "Error reading metadata file: " + e.getMessage(), e );
+        }
+        finally
+        {
+            IOUtil.close( reader );
+        }
+
+        return model;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumer.java
new file mode 100644 (file)
index 0000000..1f4433c
--- /dev/null
@@ -0,0 +1,231 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * GenericRepositoryMetadataConsumer - consumes maven-metadata.xml files as {@link RepositoryMetadata} objects.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericRepositoryMetadataConsumer
+    extends AbstractConsumer
+    implements Consumer
+{
+    public abstract void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file );
+
+    private static final List includePatterns;
+
+    static
+    {
+        includePatterns = new ArrayList();
+        includePatterns.add( "**/maven-metadata.xml" );
+    }
+
+    public List getIncludePatterns()
+    {
+        return includePatterns;
+    }
+
+    public boolean isEnabled()
+    {
+        // the RepositoryMetadata objects only exist in 'default' layout repositories.
+        ArtifactRepositoryLayout layout = repository.getLayout();
+        return ( layout instanceof DefaultRepositoryLayout );
+    }
+
+    public void processFile( BaseFile file )
+        throws ConsumerException
+    {
+        if ( file.length() <= 0 )
+        {
+            throw new ConsumerException( file, "File is empty." );
+        }
+
+        if ( !file.canRead() )
+        {
+            throw new ConsumerException( file, "Not allowed to read file due to permission settings on file." );
+        }
+
+        RepositoryMetadata metadata = buildMetadata( file );
+        processRepositoryMetadata( metadata, file );
+    }
+
+    private RepositoryMetadata buildMetadata( BaseFile metadataFile )
+        throws ConsumerException
+    {
+        Metadata m;
+        Reader reader = null;
+        try
+        {
+            reader = new FileReader( metadataFile );
+            MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
+
+            m = metadataReader.read( reader );
+        }
+        catch ( XmlPullParserException e )
+        {
+            throw new ConsumerException( metadataFile, "Error parsing metadata file: " + e.getMessage(), e );
+        }
+        catch ( IOException e )
+        {
+            throw new ConsumerException( metadataFile, "Error reading metadata file: " + e.getMessage(), e );
+        }
+        finally
+        {
+            IOUtil.close( reader );
+        }
+
+        RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataFile );
+
+        if ( repositoryMetadata == null )
+        {
+            throw new ConsumerException( metadataFile, "Unable to build a repository metadata from path." );
+        }
+
+        return repositoryMetadata;
+    }
+
+    /**
+     * Builds a RepositoryMetadata object from a Metadata object and its path.
+     *
+     * @param m            Metadata
+     * @param metadataFile file information
+     * @return RepositoryMetadata if the parameters represent one; null if not
+     * @throws ConsumerException if the metadata contents do not match the file's location on disk
+     */
+    private RepositoryMetadata buildMetadata( Metadata m, BaseFile metadataFile )
+        throws ConsumerException
+    {
+        if ( artifactFactory == null )
+        {
+            throw new IllegalStateException( "Unable to build metadata with a null artifactFactory." );
+        }
+
+        String metaGroupId = m.getGroupId();
+        String metaArtifactId = m.getArtifactId();
+        String metaVersion = m.getVersion();
+
+        // check if the groupId, artifactId and version are present in the
+        // metadata path
+        // parse the path, in reverse order
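+        // e.g. for "org/apache/testgroup/discovery/1.0/maven-metadata.xml" the reversed parts,
+        // once the filename is dropped, are [1.0, discovery, testgroup, apache, org]; the first
+        // entry tells us whether this is version-, artifact- or group-level metadata.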
+        List pathParts = new ArrayList();
+        StringTokenizer st = new StringTokenizer( metadataFile.getRelativePath(), "/\\" );
+        while ( st.hasMoreTokens() )
+        {
+            pathParts.add( st.nextToken() );
+        }
+
+        Collections.reverse( pathParts );
+        // remove the metadata file
+        pathParts.remove( 0 );
+        Iterator it = pathParts.iterator();
+        String tmpDir = (String) it.next();
+
+        Artifact artifact = null;
+        if ( StringUtils.isNotEmpty( metaVersion ) )
+        {
+            artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
+        }
+
+        // snapshotMetadata
+        RepositoryMetadata metadata = null;
+        if ( tmpDir != null && tmpDir.equals( metaVersion ) )
+        {
+            if ( artifact != null )
+            {
+                metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+            }
+        }
+        else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
+        {
+            // artifactMetadata
+            if ( artifact != null )
+            {
+                metadata = new ArtifactRepositoryMetadata( artifact );
+            }
+            else
+            {
+                artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
+                metadata = new ArtifactRepositoryMetadata( artifact );
+            }
+        }
+        else
+        {
+            String groupDir = "";
+            int ctr = 0;
+            for ( it = pathParts.iterator(); it.hasNext(); )
+            {
+                String path = (String) it.next();
+                if ( ctr == 0 )
+                {
+                    groupDir = path;
+                }
+                else
+                {
+                    groupDir = path + "." + groupDir;
+                }
+                ctr++;
+            }
+
+            // groupMetadata
+            if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
+            {
+                metadata = new GroupRepositoryMetadata( metaGroupId );
+            }
+            else
+            {
+                /* If we reached this point, we have some bad metadata.
+                 * We have a metadata file, with values for groupId / artifactId / version.
+                 * But the information it is providing does not exist relative to the file location.
+                 * 
+                 * See ${basedir}/src/test/repository/javax/maven-metadata.xml for example
+                 */
+                throw new ConsumerException( metadataFile,
+                                             "Contents of metadata are not appropriate for its location on disk." );
+            }
+        }
+
+        return metadata;
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/BaseFile.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/BaseFile.java
new file mode 100644 (file)
index 0000000..a4b83db
--- /dev/null
@@ -0,0 +1,105 @@
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.net.URI;
+
+/**
+ * BaseFile - a convenience File that tracks its base directory and can report the file's
+ * path relative to that base directory.
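+ *
+ * For example (illustrative paths), <code>new BaseFile( "/repo", new File( "/repo/org/foo/foo-1.0.jar" ) )</code>
+ * reports a relative path of <code>org/foo/foo-1.0.jar</code> from {@link #getRelativePath()}.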
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class BaseFile
+    extends File
+{
+    private File baseDir;
+
+    public BaseFile( File pathFile )
+    {
+        this( pathFile.getAbsolutePath() );
+    }
+
+    public BaseFile( File repoDir, File pathFile )
+    {
+        this( repoDir, PathUtil.getRelative( repoDir.getAbsolutePath(), pathFile ) );
+    }
+
+    public BaseFile( File parent, String child )
+    {
+        super( parent, child );
+        this.baseDir = parent;
+    }
+
+    public BaseFile( String pathname )
+    {
+        super( pathname );
+
+        // Calculate the top level directory.
+
+        File parent = this;
+        while ( parent.getParentFile() != null )
+        {
+            parent = parent.getParentFile();
+        }
+
+        this.baseDir = parent;
+    }
+
+    public BaseFile( String repoDir, File pathFile )
+    {
+        this( new File( repoDir ), pathFile );
+    }
+
+    public BaseFile( String parent, String child )
+    {
+        super( parent, child );
+        this.baseDir = new File( parent );
+    }
+
+    public BaseFile( URI uri )
+    {
+        super( uri ); // only to satisfy java compiler.
+        throw new IllegalStateException( "The " + BaseFile.class.getName()
+            + " object does not support URI construction." );
+    }
+
+    public File getBaseDir()
+    {
+        return baseDir;
+    }
+
+    public String getRelativePath()
+    {
+        return PathUtil.getRelative( this.baseDir.getAbsolutePath(), this );
+    }
+
+    public void setBaseDir( File baseDir )
+    {
+        this.baseDir = baseDir;
+    }
+
+    public void setBaseDir( String repoDir )
+    {
+        setBaseDir( new File( repoDir ) );
+    }
+}
diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/PathUtil.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/PathUtil.java
new file mode 100644 (file)
index 0000000..25df425
--- /dev/null
@@ -0,0 +1,56 @@
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+/**
+ * PathUtil - simple utility methods for path manipulation. 
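+ *
+ * For example, <code>getRelative( "/repo", "/repo/org/foo/foo-1.0.jar" )</code>
+ * returns <code>"org/foo/foo-1.0.jar"</code>.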
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class PathUtil
+{
+    public static String getRelative( String basedir, File file )
+    {
+        return getRelative( basedir, file.getAbsolutePath() );
+    }
+
+    public static String getRelative( String basedir, String child )
+    {
+        if ( child.startsWith( basedir ) )
+        {
+            // simple solution.
+            return child.substring( basedir.length() + 1 );
+        }
+
+        String absoluteBasedir = new File( basedir ).getAbsolutePath();
+        if ( child.startsWith( absoluteBasedir ) )
+        {
+            // resolved basedir solution.
+            return child.substring( absoluteBasedir.length() + 1 );
+        }
+
+        // File is not within basedir.
+        throw new IllegalStateException( "Unable to obtain relative path of file " + child
+            + ", it is not within basedir " + basedir + "." );
+    }
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/AbstractArchivaCommonTestCase.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/AbstractArchivaCommonTestCase.java
new file mode 100644 (file)
index 0000000..11b2d1c
--- /dev/null
@@ -0,0 +1,191 @@
+package org.apache.maven.archiva.common;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * AbstractArchivaCommonTestCase 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractArchivaCommonTestCase
+    extends PlexusTestCase
+{
+    protected ArtifactRepository getLegacyRepository()
+        throws Exception
+    {
+        File repoBaseDir = new File( getBasedir(), "src/test/legacy-repository" );
+        ArtifactRepository repository = createRepository( repoBaseDir, "legacy" );
+        resetRepositoryState( repository );
+        return repository;
+    }
+
+    protected ArtifactRepository getDefaultRepository()
+        throws Exception
+    {
+        File repoBaseDir = new File( getBasedir(), "src/test/repository" );
+        ArtifactRepository repository = createRepository( repoBaseDir, "default" );
+        resetRepositoryState( repository );
+        return repository;
+    }
+
+    private void resetRepositoryState( ArtifactRepository repository )
+        throws IOException
+    {
+        File repoBaseDir = new File( repository.getBasedir() );
+
+        List tmpfiles = FileUtils.getFiles( repoBaseDir, ".*", "" );
+        for ( Iterator it = tmpfiles.iterator(); it.hasNext(); )
+        {
+            File hit = (File) it.next();
+            if ( hit.exists() )
+            {
+                if ( hit.isFile() )
+                {
+                    hit.delete();
+                }
+
+                if ( hit.isDirectory() )
+                {
+                    FileUtils.deleteDirectory( hit );
+                }
+            }
+        }
+    }
+
+    protected ArtifactRepository createRepository( File basedir, String layout )
+        throws Exception
+    {
+        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+        ArtifactRepositoryLayout repoLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, layout );
+
+        return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, repoLayout, null, null );
+    }
+
+    public List getLegacyLayoutArtifactPaths()
+    {
+        List files = new ArrayList();
+
+        files.add( "invalid/jars/1.0/invalid-1.0.jar" );
+        files.add( "invalid/jars/invalid-1.0.rar" );
+        files.add( "invalid/jars/invalid.jar" );
+        files.add( "invalid/invalid-1.0.jar" );
+        files.add( "javax.sql/jars/jdbc-2.0.jar" );
+        files.add( "org.apache.maven/jars/some-ejb-1.0-client.jar" );
+        files.add( "org.apache.maven/jars/testing-1.0.jar" );
+        files.add( "org.apache.maven/jars/testing-1.0-sources.jar" );
+        files.add( "org.apache.maven/jars/testing-UNKNOWN.jar" );
+        files.add( "org.apache.maven/jars/testing-1.0.zip" );
+        files.add( "org.apache.maven/jars/testing-1.0-20050611.112233-1.jar" );
+        files.add( "org.apache.maven/jars/testing-1.0.tar.gz" );
+        files.add( "org.apache.maven.update/jars/test-not-updated-1.0.jar" );
+        files.add( "org.apache.maven.update/jars/test-updated-1.0.jar" );
+
+        return files;
+    }
+
+    public List getDefaultLayoutArtifactPaths()
+    {
+        List files = new ArrayList();
+
+        files.add( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
+        files.add( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
+        files.add( "invalid/invalid/1.0/invalid-1.0b.jar" );
+        files.add( "invalid/invalid/1.0/invalid-2.0.jar" );
+        files.add( "invalid/invalid-1.0.jar" );
+        files.add( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
+        files.add( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar" );
+        files.add( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar" );
+        files.add( "org/apache/maven/A/1.0/A-1.0.war" );
+        files.add( "org/apache/maven/A/1.0/A-1.0.pom" );
+        files.add( "org/apache/maven/B/2.0/B-2.0.pom" );
+        files.add( "org/apache/maven/B/1.0/B-1.0.pom" );
+        files.add( "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar" );
+        files.add( "org/apache/maven/C/1.0/C-1.0.war" );
+        files.add( "org/apache/maven/C/1.0/C-1.0.pom" );
+        files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom" );
+        files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar" );
+        files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom" );
+        files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar" );
+        files.add( "org/apache/maven/discovery/1.0/discovery-1.0.pom" );
+        files.add( "org/apache/maven/testing/1.0/testing-1.0-test-sources.jar" );
+        files.add( "org/apache/maven/testing/1.0/testing-1.0.jar" );
+        files.add( "org/apache/maven/testing/1.0/testing-1.0-sources.jar" );
+        files.add( "org/apache/maven/testing/1.0/testing-1.0.zip" );
+        files.add( "org/apache/maven/testing/1.0/testing-1.0.tar.gz" );
+        files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.pom" );
+        files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.jar" );
+        files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.pom" );
+        files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.jar" );
+        files.add( "org/apache/testgroup/discovery/1.0/discovery-1.0.pom" );
+        files.add( "javax/sql/jdbc/2.0/jdbc-2.0.jar" );
+
+        return files;
+    }
+
+    public List getDefaultLayoutMetadataPaths()
+    {
+        List files = new ArrayList();
+
+        files.add( "org/apache/maven/some-ejb/1.0/maven-metadata.xml" );
+        files.add( "org/apache/maven/update/test-not-updated/maven-metadata.xml" );
+        files.add( "org/apache/maven/update/test-updated/maven-metadata.xml" );
+        files.add( "org/apache/maven/maven-metadata.xml" );
+        files.add( "org/apache/testgroup/discovery/1.0/maven-metadata.xml" );
+        files.add( "org/apache/testgroup/discovery/maven-metadata.xml" );
+        files.add( "javax/sql/jdbc/2.0/maven-metadata-repository.xml" );
+        files.add( "javax/sql/jdbc/maven-metadata-repository.xml" );
+        files.add( "javax/sql/maven-metadata-repository.xml" );
+        files.add( "javax/maven-metadata.xml" );
+
+        return files;
+    }
+
+    public List getDefaultLayoutModelPaths()
+    {
+        List files = new ArrayList();
+
+        files.add( "org/apache/maven/A/1.0/A-1.0.pom" );
+        files.add( "org/apache/maven/B/2.0/B-2.0.pom" );
+        files.add( "org/apache/maven/B/1.0/B-1.0.pom" );
+        files.add( "org/apache/maven/C/1.0/C-1.0.pom" );
+        files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom" );
+        files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom" );
+        files.add( "org/apache/maven/discovery/1.0/discovery-1.0.pom" );
+        files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.pom" );
+        files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.pom" );
+        files.add( "org/apache/testgroup/discovery/1.0/discovery-1.0.pom" );
+
+        return files;
+    }
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/AllTests.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/AllTests.java
new file mode 100644 (file)
index 0000000..878f7fe
--- /dev/null
@@ -0,0 +1,45 @@
+package org.apache.maven.archiva.common;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common" );
+        //$JUnit-BEGIN$
+        suite.addTest( org.apache.maven.archiva.common.artifact.builder.AllTests.suite() );
+        suite.addTest( org.apache.maven.archiva.common.consumers.AllTests.suite() );
+        suite.addTest( org.apache.maven.archiva.common.utils.AllTests.suite() );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilderTestCase.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilderTestCase.java
new file mode 100644 (file)
index 0000000..9d9b55d
--- /dev/null
@@ -0,0 +1,51 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.PlexusTestCase;
+
+/**
+ * AbstractLayoutArtifactBuilderTestCase 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractLayoutArtifactBuilderTestCase
+    extends PlexusTestCase
+{
+
+    protected void assertArtifact( String groupId, String artifactId, String version, String type, String classifier, Artifact artifact )
+    {
+        assertNotNull( "Artifact cannot be null.", artifact );
+    
+        assertEquals( "Artifact groupId", groupId, artifact.getGroupId() );
+        assertEquals( "Artifact artifactId", artifactId, artifact.getArtifactId() );
+        assertEquals( "Artifact version", version, artifact.getVersion() );
+        assertEquals( "Artifact type", type, artifact.getType() );
+    
+        if ( StringUtils.isNotBlank( classifier ) )
+        {
+            assertEquals( "Artifact classifier", classifier, artifact.getClassifier() );
+        }
+    }
+    
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AllTests.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AllTests.java
new file mode 100644 (file)
index 0000000..45511c3
--- /dev/null
@@ -0,0 +1,44 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common.artifact.builder" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( LegacyLayoutArtifactBuilderTest.class );
+        suite.addTestSuite( DefaultLayoutArtifactBuilderTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilderTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilderTest.java
new file mode 100644 (file)
index 0000000..954a5bb
--- /dev/null
@@ -0,0 +1,206 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * DefaultLayoutArtifactBuilderTest 
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DefaultLayoutArtifactBuilderTest
+    extends AbstractLayoutArtifactBuilderTestCase
+{
+    LayoutArtifactBuilder builder;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "default" );
+        assertNotNull( builder );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        if ( builder != null )
+        {
+            release( builder );
+        }
+        super.tearDown();
+    }
+
+    public void testPathDistributionArtifacts()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0.tar.gz" ) );
+
+        assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0.zip" ) );
+    }
+
+    public void testPathNormal()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder
+            .build( "/org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) );
+
+        assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder
+            .build( "org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, builder.build( "javax/sql/jdbc/2.0/jdbc-2.0.jar" ) );
+
+    }
+
+    public void testPathSnapshots()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT", "jar", null, builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar" ) );
+
+        assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar" ) );
+    }
+
+    public void testPathSnapshotWithClassifier()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar" ) );
+    }
+
+    public void testPathWithClassifier()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", builder
+            .build( "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar" ) );
+    }
+
+    public void testPathWithJavaSourceInclusion()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0-sources.jar" ) );
+    }
+
+    public void testProblemMissingType()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1/invalid-1" );
+            fail( "Should have detected missing type." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename does not have an extension.", e.getMessage() );
+        }
+    }
+
+    public void testProblemNonSnapshotInSnapshotDir()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
+            fail( "Non Snapshot artifact inside of an Snapshot dir is invalid." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", e.getMessage() );
+        }
+    }
+
+    public void testProblemPathTooShort()
+    {
+        try
+        {
+            builder.build( "invalid/invalid-1.0.jar" );
+            fail( "Should have detected that path is too short." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path is too short to build an artifact from.", e.getMessage() );
+        }
+    }
+
+    public void testProblemTimestampSnapshotNotInSnapshotDir()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
+            fail( "Timestamped Snapshot artifact not inside of an Snapshot dir is invalid." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            // TODO: Is this really the right thing to do for this kind of artifact??
+            assertEquals( "Built snapshot artifact base version does not match path version: 1.0-SNAPSHOT; "
+                + "should have been version: 1.0-20050611.123456-1", e.getMessage() );
+        }
+    }
+
+    public void testProblemVersionPathMismatch()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0/invalid-2.0.jar" );
+            fail( "Should have detected version mismatch between path and artifact." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Built artifact version does not match path version", e.getMessage() );
+        }
+    }
+
+    public void testProblemVersionPathMismatchAlt()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0/invalid-1.0b.jar" );
+            fail( "Should have version mismatch between directory and artifact." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path version does not corresspond to an artifact version", e.getMessage() );
+        }
+    }
+
+    public void testProblemWrongArtifactId()
+    {
+        try
+        {
+            builder.build( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
+            fail( "Should have detected wrong artifact Id." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename does not correspond to an artifact.", e.getMessage() );
+        }
+    }
+}
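For orientation only (not part of this commit): the test above drives the default-layout builder as a path-to-coordinates mapping. A condensed sketch of that usage, assuming the same Plexus lookup the test performs in setUp() and a test method that declares throws BuilderException; the sample path is the one from testPathNormal.

    // Inside a PlexusTestCase subclass, after the container has started.
    LayoutArtifactBuilder builder =
        (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "default" );

    // Default layout: <groupId dirs>/<artifactId>/<version>/<artifactId>-<version>.<ext>
    Artifact artifact = builder.build( "org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" );

    // Yields the coordinates asserted in testPathNormal above:
    //   groupId    = org.apache.maven.wagon
    //   artifactId = wagon
    //   version    = 1.0
    //   type       = jar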
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilderTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilderTest.java
new file mode 100644 (file)
index 0000000..2fdae4a
--- /dev/null
@@ -0,0 +1,160 @@
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+
+/**
+ * LegacyLayoutArtifactBuilderTest 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class LegacyLayoutArtifactBuilderTest
+    extends AbstractLayoutArtifactBuilderTestCase
+{
+    LayoutArtifactBuilder builder;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "legacy" );
+        assertNotNull( builder );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        if ( builder != null )
+        {
+            release( builder );
+        }
+        super.tearDown();
+    }
+
+    public void testPathNormal()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "javax.sql/jars/jdbc-2.0.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifact );
+    }
+
+    public void testPathFinal()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar" );
+
+        assertArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606", "jar", null, artifact );
+    }
+
+    public void testPathSnapshot()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar" );
+
+        assertArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT", "jar", null, artifact );
+    }
+
+    public void testPathJavadoc()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc", artifact );
+    }
+
+    public void testPathSources()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "javax.sql/java-sources/jdbc-2.0-sources.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources", artifact );
+    }
+
+    public void testPathPlugin()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "maven/plugins/maven-test-plugin-1.8.jar" );
+
+        assertArtifact( "maven", "maven-test-plugin", "1.8", "plugin", null, artifact );
+    }
+
+    public void testProblemNoType()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1/invalid-1" );
+
+            fail( "Should have detected no type." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path does not match a legacy repository path for an artifact", e.getMessage() );
+        }
+    }
+
+    public void testProblemWrongArtifactPackaging()
+        throws ComponentLookupException
+    {
+        try
+        {
+            builder.build( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
+
+            fail( "Should have detected wrong package extension." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path type does not match the extension", e.getMessage() );
+        }
+    }
+
+    public void testProblemNoArtifactId()
+    {
+        try
+        {
+            builder.build( "groupId/jars/-1.0.jar" );
+
+            fail( "Should have detected artifactId is missing" );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename artifactId is empty", e.getMessage() );
+        }
+
+        try
+        {
+            builder.build( "groupId/jars/1.0.jar" );
+
+            fail( "Should have detected artifactId is missing" );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename artifactId is empty", e.getMessage() );
+        }
+    }
+}
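Again for orientation only: the legacy-layout builder maps the older <groupId>/<type>s/<artifactId>-<version>.<ext> directory scheme onto coordinates. A condensed sketch mirroring the "legacy" lookup done in setUp() above, using the path from testPathNormal:

    LayoutArtifactBuilder legacy =
        (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "legacy" );

    // Legacy layout: javax.sql/jars/jdbc-2.0.jar
    Artifact jdbc = legacy.build( "javax.sql/jars/jdbc-2.0.jar" );
    // -> javax.sql:jdbc:2.0:jar (see testPathNormal above)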
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AbstractGenericConsumerTestCase.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AbstractGenericConsumerTestCase.java
new file mode 100644 (file)
index 0000000..08abd64
--- /dev/null
@@ -0,0 +1,52 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.AbstractArchivaCommonTestCase;
+
+/**
+ * AbstractGenericConsumerTestCase 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractGenericConsumerTestCase
+    extends AbstractArchivaCommonTestCase
+{
+    protected ConsumerFactory consumerFactory;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        consumerFactory = (ConsumerFactory) lookup( ConsumerFactory.ROLE );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        if ( consumerFactory != null )
+        {
+            release( consumerFactory );
+        }
+        super.tearDown();
+    }
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AllTests.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AllTests.java
new file mode 100644 (file)
index 0000000..b04c4c3
--- /dev/null
@@ -0,0 +1,43 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common.consumers" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( GenericArtifactConsumerTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/FileProblemsTracker.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/FileProblemsTracker.java
new file mode 100644 (file)
index 0000000..2773c99
--- /dev/null
@@ -0,0 +1,112 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * FileProblemsTracker 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class FileProblemsTracker
+{
+    private Map problemMap = new HashMap();
+
+    public void addProblem( BaseFile file, String message )
+    {
+        String path = file.getRelativePath();
+        addProblem( path, message );
+    }
+
+    private void addProblem( String path, String message )
+    {
+        List problems = getProblems( path );
+        problems.add( message );
+        problemMap.put( path, problems );
+    }
+
+    public void addProblem( ConsumerException e )
+    {
+        if ( e.getFile() != null )
+        {
+            this.addProblem( e.getFile(), e.getMessage() );
+        }
+        else
+        {
+            this.addProblem( "|fatal|", e.getMessage() );
+        }
+    }
+
+    public boolean hasProblems( String path )
+    {
+        if ( !problemMap.containsKey( path ) )
+        {
+            // No tracking of path at all.
+            return false;
+        }
+
+        List problems = (List) problemMap.get( path );
+        if ( problems == null )
+        {
+            // found path, but no list.
+            return false;
+        }
+
+        return !problems.isEmpty();
+    }
+
+    public Set getPaths()
+    {
+        return problemMap.keySet();
+    }
+
+    public List getProblems( String path )
+    {
+        List problems = (List) problemMap.get( path );
+        if ( problems == null )
+        {
+            problems = new ArrayList();
+        }
+
+        return problems;
+    }
+
+    public int getProblemCount()
+    {
+        int count = 0;
+        for ( Iterator it = problemMap.values().iterator(); it.hasNext(); )
+        {
+            List problems = (List) it.next();
+            count += problems.size();
+        }
+
+        return count;
+    }
+
+}
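Not part of the commit, just for orientation: a minimal, self-contained sketch of how the new FileProblemsTracker is exercised by the consumer tests. The repository directory and artifact path below are made up for illustration; only the tracker and BaseFile API shown in this diff is used.

    import org.apache.maven.archiva.common.consumers.FileProblemsTracker;
    import org.apache.maven.archiva.common.utils.BaseFile;

    import java.io.File;
    import java.util.Iterator;

    public class FileProblemsTrackerSketch
    {
        public static void main( String[] args )
        {
            FileProblemsTracker tracker = new FileProblemsTracker();

            // Hypothetical repository base dir and a path that fails to parse.
            File repoDir = new File( "/tmp/test-repository" );
            BaseFile badJar = new BaseFile( repoDir, "invalid/invalid-1.0.jar" );

            // Problems are keyed by the file's repository-relative path.
            tracker.addProblem( badJar, "Path is too short to build an artifact from." );

            System.out.println( "total problems: " + tracker.getProblemCount() );
            System.out.println( "problems for path? " + tracker.hasProblems( badJar.getRelativePath() ) );

            for ( Iterator it = tracker.getProblems( badJar.getRelativePath() ).iterator(); it.hasNext(); )
            {
                System.out.println( " - " + it.next() );
            }
        }
    }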
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.java
new file mode 100644 (file)
index 0000000..1ff2d1e
--- /dev/null
@@ -0,0 +1,220 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * GenericArtifactConsumerTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class GenericArtifactConsumerTest
+    extends AbstractGenericConsumerTestCase
+{
+    private MockArtifactConsumer getMockArtifactConsumer()
+        throws Exception
+    {
+        return (MockArtifactConsumer) consumerFactory.createConsumer( "mock-artifact" );
+    }
+
+    public void testScanLegacy()
+        throws Exception
+    {
+        ArtifactRepository repository = getLegacyRepository();
+        List consumers = new ArrayList();
+
+        MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+        mockConsumer.init( repository );
+
+        consumers.add( mockConsumer );
+
+        List files = getLegacyLayoutArtifactPaths();
+        for ( Iterator it = files.iterator(); it.hasNext(); )
+        {
+            String path = (String) it.next();
+            try
+            {
+                mockConsumer.processFile( new BaseFile( repository.getBasedir(), path ) );
+            }
+            catch ( ConsumerException e )
+            {
+                mockConsumer.getProblemsTracker().addProblem( e );
+            }
+        }
+
+        assertNotNull( consumers );
+
+        FileProblemsTracker tracker = mockConsumer.getProblemsTracker();
+
+        assertTracker( tracker, 16 );
+
+        assertHasFailureMessage( "Path does not match a legacy repository path for an artifact",
+                                 "invalid/invalid-1.0.jar", tracker );
+        assertHasFailureMessage( "Path filename version is empty", "invalid/jars/invalid.jar", tracker );
+        assertHasFailureMessage( "Path does not match a legacy repository path for an artifact",
+                                 "invalid/jars/1.0/invalid-1.0.jar", tracker );
+
+        assertEquals( 10, mockConsumer.getArtifactMap().size() );
+    }
+
+    public void testScanDefault()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+
+        MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+        mockConsumer.init( repository );
+
+        consumers.add( mockConsumer );
+
+        List files = getDefaultLayoutArtifactPaths();
+        for ( Iterator it = files.iterator(); it.hasNext(); )
+        {
+            String path = (String) it.next();
+            try
+            {
+                mockConsumer.processFile( new BaseFile( repository.getBasedir(), path ) );
+            }
+            catch ( ConsumerException e )
+            {
+                mockConsumer.getProblemsTracker().addProblem( e );
+            }
+        }
+
+        // Test gathered information from Mock consumer.
+
+        assertNotNull( consumers );
+
+        FileProblemsTracker tracker = mockConsumer.getProblemsTracker();
+
+        assertTracker( tracker, 21 );
+
+        assertHasFailureMessage( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime",
+                                 "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar", tracker );
+        assertHasFailureMessage( "Path is too short to build an artifact from.", "invalid/invalid-1.0.jar", tracker );
+        assertHasFailureMessage( "Built artifact version does not match path version",
+                                 "invalid/invalid/1.0/invalid-2.0.jar", tracker );
+
+        assertEquals( 25, mockConsumer.getArtifactMap().size() );
+
+        // Test for known include artifacts
+
+        Collection artifacts = mockConsumer.getArtifactMap().values();
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, artifacts );
+        assertHasArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", artifacts );
+
+        // Test for known excluded files and dirs to validate exclusions.
+
+        Iterator it = mockConsumer.getArtifactMap().values().iterator();
+        while ( it.hasNext() )
+        {
+            Artifact a = (Artifact) it.next();
+            assertTrue( "Artifact " + a + " should have it's .getFile() set.", a.getFile() != null );
+            assertTrue( "Artifact " + a + " should have it's .getRepository() set.", a.getRepository() != null );
+            assertTrue( "Artifact " + a + " should have non-null repository url.", a.getRepository().getUrl() != null );
+            assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
+            assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
+        }
+    }
+
+    private void dumpProblems( FileProblemsTracker tracker )
+    {
+        int problemNum = 0;
+        System.out.println( "-- ProblemTracker dump -------------------------" );
+        for ( Iterator itPaths = tracker.getPaths().iterator(); itPaths.hasNext(); )
+        {
+            String path = (String) itPaths.next();
+            System.out.println( " [" + problemNum + "]: " + path );
+
+            int messageNum = 0;
+            for ( Iterator itProblems = tracker.getProblems( path ).iterator(); itProblems.hasNext(); )
+            {
+                String message = (String) itProblems.next();
+                System.out.println( "    [" + messageNum + "]: " + message );
+                messageNum++;
+            }
+
+            problemNum++;
+        }
+    }
+
+    private void assertTracker( FileProblemsTracker tracker, int expectedProblemCount )
+    {
+        assertNotNull( "ProblemsTracker should not be null.", tracker );
+
+        int actualProblemCount = tracker.getProblemCount();
+        if ( expectedProblemCount != actualProblemCount )
+        {
+            dumpProblems( tracker );
+            fail( "Problem count (across all paths) expected:<" + expectedProblemCount + ">, actual:<"
+                + actualProblemCount + ">" );
+        }
+    }
+
+    private void assertHasFailureMessage( String message, String path, FileProblemsTracker tracker )
+    {
+        if ( !tracker.hasProblems( path ) )
+        {
+            fail( "There are no messages for expected path [" + path + "]" );
+        }
+
+        assertTrue( "Unable to find message [" + message + "] in path [" + path + "]", tracker.getProblems( path )
+            .contains( message ) );
+    }
+
+    private void assertHasArtifact( String groupId, String artifactId, String version, String type, String classifier,
+                                    Collection collection )
+    {
+        for ( Iterator it = collection.iterator(); it.hasNext(); )
+        {
+            Artifact artifact = (Artifact) it.next();
+            if ( StringUtils.equals( groupId, artifact.getGroupId() )
+                && StringUtils.equals( artifactId, artifact.getArtifactId() )
+                && StringUtils.equals( version, artifact.getVersion() )
+                && StringUtils.equals( type, artifact.getType() )
+                && StringUtils.equals( classifier, artifact.getClassifier() ) )
+            {
+                // Found it!
+                return;
+            }
+        }
+
+        fail( "Was unable to find artifact " + groupId + ":" + artifactId + ":" + version + ":" + type + ":"
+            + classifier );
+    }
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockArtifactConsumer.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockArtifactConsumer.java
new file mode 100644 (file)
index 0000000..94fb5ca
--- /dev/null
@@ -0,0 +1,71 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.common.utils.PathUtil;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockArtifactConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers"
+ *     role-hint="mock-artifact"
+ *     instantiation-strategy="per-lookup"
+ */
+public class MockArtifactConsumer
+    extends GenericArtifactConsumer
+{
+    private Map artifactMap = new HashMap();
+
+    private FileProblemsTracker problemsTracker = new FileProblemsTracker();
+
+    public void processArtifact( Artifact artifact, BaseFile file )
+    {
+        String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+        artifactMap.put( relpath, artifact );
+    }
+
+    public void processFileProblem( BaseFile file, String message )
+    {
+        problemsTracker.addProblem( file, message );
+    }
+
+    public Map getArtifactMap()
+    {
+        return artifactMap;
+    }
+
+    public String getName()
+    {
+        return "Mock Artifact Consumer (Testing Only)";
+    }
+    
+    public FileProblemsTracker getProblemsTracker()
+    {
+        return problemsTracker;
+    }
+}
\ No newline at end of file
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockModelConsumer.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockModelConsumer.java
new file mode 100644 (file)
index 0000000..969f10b
--- /dev/null
@@ -0,0 +1,70 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.model.Model;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockModelConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers"
+ *     role-hint="mock-model"
+ *     instantiation-strategy="per-lookup"
+ */
+public class MockModelConsumer
+    extends GenericModelConsumer
+{
+    private Map modelMap = new HashMap();
+
+    private FileProblemsTracker problemsTracker = new FileProblemsTracker();
+
+    public void processModel( Model model, BaseFile file )
+    {
+        modelMap.put( file.getRelativePath(), model );
+    }
+
+    public void processFileProblem( BaseFile file, String message )
+    {
+        problemsTracker.addProblem( file, message );
+    }
+
+    public Map getModelMap()
+    {
+        return modelMap;
+    }
+
+    public String getName()
+    {
+        return "Mock Model Consumer (Testing Only)";
+    }
+
+    public FileProblemsTracker getProblemsTracker()
+    {
+        return problemsTracker;
+    }
+
+}
\ No newline at end of file
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockRepositoryMetadataConsumer.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockRepositoryMetadataConsumer.java
new file mode 100644 (file)
index 0000000..d10331a
--- /dev/null
@@ -0,0 +1,69 @@
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockRepositoryMetadataConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers"
+ *     role-hint="mock-metadata"
+ *     instantiation-strategy="per-lookup"
+ */
+public class MockRepositoryMetadataConsumer
+    extends GenericRepositoryMetadataConsumer
+{
+    private Map repositoryMetadataMap = new HashMap();
+
+    private FileProblemsTracker problemsTracker = new FileProblemsTracker();
+
+    public void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file )
+    {
+        repositoryMetadataMap.put( file.getRelativePath(), metadata );
+    }
+
+    public void processFileProblem( BaseFile file, String message )
+    {
+        problemsTracker.addProblem( file, message );
+    }
+
+    public Map getRepositoryMetadataMap()
+    {
+        return repositoryMetadataMap;
+    }
+
+    public String getName()
+    {
+        return "Mock RepositoryMetadata Consumer (Testing Only)";
+    }
+
+    public FileProblemsTracker getProblemsTracker()
+    {
+        return problemsTracker;
+    }
+}
\ No newline at end of file
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/AllTests.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/AllTests.java
new file mode 100644 (file)
index 0000000..db0ec4f
--- /dev/null
@@ -0,0 +1,44 @@
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common.utils" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( PathUtilTest.class );
+        suite.addTestSuite( BaseFileTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/BaseFileTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/BaseFileTest.java
new file mode 100644 (file)
index 0000000..53412ba
--- /dev/null
@@ -0,0 +1,115 @@
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+import junit.framework.TestCase;
+
+/**
+ * BaseFileTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class BaseFileTest
+    extends TestCase
+{
+    public void testFileString()
+    {
+        File repoDir = new File( "/home/user/foo/repository" );
+        String pathFile = "path/to/resource.xml";
+        BaseFile file = new BaseFile( repoDir, pathFile );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    public void testFileFile()
+    {
+        File repoDir = new File( "/home/user/foo/repository" );
+        File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" );
+        BaseFile file = new BaseFile( repoDir, pathFile );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    public void testStringFile()
+    {
+        String repoDir = "/home/user/foo/repository";
+        File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" );
+        BaseFile file = new BaseFile( repoDir, pathFile );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    public void testFileThenSetBaseString()
+    {
+        String repoDir = "/home/user/foo/repository";
+        File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" );
+        BaseFile file = new BaseFile( pathFile );
+        file.setBaseDir( repoDir );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    public void testFileThenSetBaseFile()
+    {
+        File repoDir = new File( "/home/user/foo/repository" );
+        File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" );
+        BaseFile file = new BaseFile( pathFile );
+        file.setBaseDir( repoDir );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+    
+    public void testStringThenSetBaseString()
+    {
+        String repoDir = "/home/user/foo/repository";
+        String pathFile = "/home/user/foo/repository/path/to/resource.xml";
+        BaseFile file = new BaseFile( pathFile );
+        file.setBaseDir( repoDir );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    public void testStringThenSetBaseFile()
+    {
+        File repoDir = new File( "/home/user/foo/repository" );
+        String pathFile = "/home/user/foo/repository/path/to/resource.xml";
+        BaseFile file = new BaseFile( pathFile );
+        file.setBaseDir( repoDir );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }    
+}
diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/PathUtilTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/PathUtilTest.java
new file mode 100644 (file)
index 0000000..58abbd2
--- /dev/null
@@ -0,0 +1,40 @@
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.PathUtil;
+
+import junit.framework.TestCase;
+
+/**
+ * PathUtilTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class PathUtilTest
+    extends TestCase
+{
+    public void testToRelative()
+    {
+        assertEquals( "path/to/resource.xml", PathUtil.getRelative( "/home/user/foo/repository",
+                                                                    "/home/user/foo/repository/path/to/resource.xml" ) );
+    }
+}
diff --git a/archiva-common/src/test/legacy-repository/CVS/Root b/archiva-common/src/test/legacy-repository/CVS/Root
new file mode 100644 (file)
index 0000000..2e65f24
--- /dev/null
@@ -0,0 +1 @@
+not a real CVS root - for testing exclusions
diff --git a/archiva-common/src/test/legacy-repository/KEYS b/archiva-common/src/test/legacy-repository/KEYS
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/invalid/foo/invalid-1.0.foo b/archiva-common/src/test/legacy-repository/invalid/foo/invalid-1.0.foo
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/invalid/invalid-1.0.jar b/archiva-common/src/test/legacy-repository/invalid/invalid-1.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/invalid/jars/1.0/invalid-1.0.jar b/archiva-common/src/test/legacy-repository/invalid/jars/1.0/invalid-1.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/invalid/jars/invalid-1.0.rar b/archiva-common/src/test/legacy-repository/invalid/jars/invalid-1.0.rar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/invalid/jars/invalid.jar b/archiva-common/src/test/legacy-repository/invalid/jars/invalid.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/invalid/jars/no-extension b/archiva-common/src/test/legacy-repository/invalid/jars/no-extension
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/javax.sql/jars/jdbc-2.0.jar b/archiva-common/src/test/legacy-repository/javax.sql/jars/jdbc-2.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-not-updated-1.0.jar b/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-not-updated-1.0.jar
new file mode 100644 (file)
index 0000000..54d190b
--- /dev/null
@@ -0,0 +1 @@
+dummy content. sample file only.\r
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-updated-1.0.jar b/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-updated-1.0.jar
new file mode 100644 (file)
index 0000000..54d190b
--- /dev/null
@@ -0,0 +1 @@
+dummy content. sample file only.\r
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/some-ejb-1.0-client.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/some-ejb-1.0-client.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-20050611.112233-1.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-20050611.112233-1.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-sources.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-sources.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.tar.gz b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.tar.gz
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.zip b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.zip
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-UNKNOWN.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-UNKNOWN.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/CVS/Root b/archiva-common/src/test/repository/CVS/Root
new file mode 100644 (file)
index 0000000..2e65f24
--- /dev/null
@@ -0,0 +1 @@
+not a real CVS root - for testing exclusions
diff --git a/archiva-common/src/test/repository/KEYS b/archiva-common/src/test/repository/KEYS
new file mode 100644 (file)
index 0000000..d3b34d5
--- /dev/null
@@ -0,0 +1 @@
+test KEYS file
\ No newline at end of file
diff --git a/archiva-common/src/test/repository/invalid/invalid-1.0.jar b/archiva-common/src/test/repository/invalid/invalid-1.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar b/archiva-common/src/test/repository/invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar b/archiva-common/src/test/repository/invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/invalid/invalid/1.0/invalid-1.0b.jar b/archiva-common/src/test/repository/invalid/invalid/1.0/invalid-1.0b.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/invalid/invalid/1.0/invalid-2.0.jar b/archiva-common/src/test/repository/invalid/invalid/1.0/invalid-2.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/invalid/invalid/1/invalid-1 b/archiva-common/src/test/repository/invalid/invalid/1/invalid-1
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/javax/maven-metadata.xml b/archiva-common/src/test/repository/javax/maven-metadata.xml
new file mode 100644 (file)
index 0000000..b3baf54
--- /dev/null
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<!-- This metadata is intentionally wrong.  -->
+<metadata>
+  <groupId>javax.sql</groupId>
+  <artifactId>jdbc</artifactId>
+  <version>2.0</version>
+</metadata>
diff --git a/archiva-common/src/test/repository/javax/sql/jdbc/2.0/jdbc-2.0.jar b/archiva-common/src/test/repository/javax/sql/jdbc/2.0/jdbc-2.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/javax/sql/jdbc/2.0/maven-metadata-repository.xml b/archiva-common/src/test/repository/javax/sql/jdbc/2.0/maven-metadata-repository.xml
new file mode 100644 (file)
index 0000000..caf5b66
--- /dev/null
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<metadata>
+  <groupId>javax.sql</groupId>
+  <artifactId>jdbc</artifactId>
+  <version>2.0</version>
+</metadata>
diff --git a/archiva-common/src/test/repository/javax/sql/jdbc/maven-metadata-repository.xml b/archiva-common/src/test/repository/javax/sql/jdbc/maven-metadata-repository.xml
new file mode 100644 (file)
index 0000000..bb75708
--- /dev/null
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<metadata>
+  <groupId>javax.sql</groupId>
+  <artifactId>jdbc</artifactId>
+  <version>2.0</version>
+  <versioning>
+    <versions>
+      <version>2.0</version>
+    </versions>
+  </versioning>
+</metadata>
diff --git a/archiva-common/src/test/repository/javax/sql/maven-metadata-repository.xml b/archiva-common/src/test/repository/javax/sql/maven-metadata-repository.xml
new file mode 100644 (file)
index 0000000..caf5b66
--- /dev/null
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<metadata>
+  <groupId>javax.sql</groupId>
+  <artifactId>jdbc</artifactId>
+  <version>2.0</version>
+</metadata>
diff --git a/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.pom
new file mode 100644 (file)
index 0000000..202a0a4
--- /dev/null
@@ -0,0 +1,28 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven</groupId>
+  <artifactId>A</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <packaging>war</packaging>
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.war b/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.war
new file mode 100644 (file)
index 0000000..54d190b
--- /dev/null
@@ -0,0 +1 @@
+dummy content. sample file only.\r
diff --git a/archiva-common/src/test/repository/org/apache/maven/B/1.0/B-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/B/1.0/B-1.0.pom
new file mode 100644 (file)
index 0000000..fa5f8f6
--- /dev/null
@@ -0,0 +1,28 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven</groupId>
+  <artifactId>B</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <packaging>pom</packaging>
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/B/2.0/B-2.0.pom b/archiva-common/src/test/repository/org/apache/maven/B/2.0/B-2.0.pom
new file mode 100644 (file)
index 0000000..c3034e8
--- /dev/null
@@ -0,0 +1,28 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven</groupId>
+  <artifactId>B</artifactId>
+  <version>2.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <packaging>pom</packaging>
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.pom
new file mode 100644 (file)
index 0000000..ae14cd7
--- /dev/null
@@ -0,0 +1,28 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven</groupId>
+  <artifactId>C</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <packaging>war</packaging>
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.war b/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.war
new file mode 100644 (file)
index 0000000..54d190b
--- /dev/null
@@ -0,0 +1 @@
+dummy content. sample file only.\r
diff --git a/archiva-common/src/test/repository/org/apache/maven/discovery/1.0/discovery-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/discovery/1.0/discovery-1.0.pom
new file mode 100644 (file)
index 0000000..5a29f61
--- /dev/null
@@ -0,0 +1,28 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven</groupId>
+  <artifactId>discovery</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <packaging>pom</packaging>
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/maven/maven-metadata.xml
new file mode 100644 (file)
index 0000000..8ce7fc7
--- /dev/null
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<metadata>
+  <groupId>org.apache.maven</groupId>
+</metadata>
\ No newline at end of file
diff --git a/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.jar b/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.jar
new file mode 100644 (file)
index 0000000..54d190b
--- /dev/null
@@ -0,0 +1 @@
+dummy content. sample file only.\r
diff --git a/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.pom
new file mode 100644 (file)
index 0000000..6ab57d1
--- /dev/null
@@ -0,0 +1,29 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven</groupId>
+  <artifactId>C</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <!-- default packaging is jar -->
+  <!--packaging>jar</packaging-->
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.jar b/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.jar
new file mode 100644 (file)
index 0000000..54d190b
--- /dev/null
@@ -0,0 +1 @@
+dummy content. sample file only.\r
diff --git a/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.pom b/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.pom
new file mode 100644 (file)
index 0000000..a959980
--- /dev/null
@@ -0,0 +1,29 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven</groupId>
+  <artifactId>C</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <!-- specified packaging -->
+  <packaging>jar</packaging>
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/maven-metadata.xml
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar b/archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar b/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar b/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar b/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-sources.jar b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-sources.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-test-sources.jar b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-test-sources.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.jar b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.jar
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.tar.gz b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.tar.gz
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.zip b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.zip
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar
new file mode 100644 (file)
index 0000000..54d190b
--- /dev/null
@@ -0,0 +1 @@
+dummy content. sample file only.\r
diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom
new file mode 100644 (file)
index 0000000..452727f
--- /dev/null
@@ -0,0 +1,29 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven.update</groupId>
+  <artifactId>test-not-updated</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <!-- default packaging is jar -->
+  <!--packaging>jar</packaging-->
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/maven-metadata.xml
new file mode 100644 (file)
index 0000000..bd56a21
--- /dev/null
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<metadata>
+  <groupId>org.apache.maven.update</groupId>
+  <artifactId>test-not-updated</artifactId>
+</metadata>
\ No newline at end of file
diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar
new file mode 100644 (file)
index 0000000..54d190b
--- /dev/null
@@ -0,0 +1 @@
+dummy content. sample file only.\r
diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom
new file mode 100644 (file)
index 0000000..edd7b64
--- /dev/null
@@ -0,0 +1,29 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.maven.update</groupId>
+  <artifactId>test-updated</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <!-- default packaging is jar -->
+  <!--packaging>jar</packaging-->
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-updated/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/maven-metadata.xml
new file mode 100644 (file)
index 0000000..86e063c
--- /dev/null
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<metadata>
+  <groupId>org.apache.maven.update</groupId>
+  <artifactId>test-updated</artifactId>
+</metadata>
\ No newline at end of file
diff --git a/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/discovery-1.0.pom b/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/discovery-1.0.pom
new file mode 100644 (file)
index 0000000..12538e8
--- /dev/null
@@ -0,0 +1,28 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.testgroup</groupId>
+  <artifactId>discovery</artifactId>
+  <version>1.0</version>
+  <name>Maven Test Repository Artifact Discovery</name>
+  <packaging>pom</packaging>
+</project>
diff --git a/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/maven-metadata.xml
new file mode 100644 (file)
index 0000000..8ee1804
--- /dev/null
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<metadata>
+  <groupId>org.apache.testgroup</groupId>
+  <artifactId>discovery</artifactId>
+  <version>1.0</version>
+</metadata>
\ No newline at end of file
diff --git a/archiva-common/src/test/repository/org/apache/testgroup/discovery/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/testgroup/discovery/maven-metadata.xml
new file mode 100644 (file)
index 0000000..b024ef7
--- /dev/null
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<metadata>
+  <groupId>org.apache.testgroup</groupId>
+  <artifactId>discovery</artifactId>
+</metadata>
\ No newline at end of file
diff --git a/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.xml b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.xml
new file mode 100644 (file)
index 0000000..5760e89
--- /dev/null
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.common.consumers.Consumer</role>
+      <role-hint>mock-artifact</role-hint>
+      <implementation>org.apache.maven.archiva.common.consumers.MockArtifactConsumer</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+        </requirement>
+      </requirements>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
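The descriptor above registers a mock artifact consumer with the Plexus container for the test run; the two descriptors that follow do the same for a model consumer and a repository-metadata consumer. As a rough sketch only (assuming the test extends PlexusTestCase and that the MockArtifactConsumer named in the descriptor is on the test classpath), such a component is normally resolved by role and role-hint, exactly as declared above:

// Sketch: looking up the mock consumer declared in the descriptor above.
// The test class name and body here are illustrative, not the actual test source.
import org.apache.maven.archiva.common.consumers.Consumer;
import org.codehaus.plexus.PlexusTestCase;

public class MockConsumerLookupSketch
    extends PlexusTestCase
{
    public void testLookupMockArtifactConsumer()
        throws Exception
    {
        // Role and role-hint match the <role> and <role-hint> elements of the descriptor.
        Consumer consumer =
            (Consumer) lookup( "org.apache.maven.archiva.common.consumers.Consumer", "mock-artifact" );
        assertNotNull( "mock-artifact consumer should be resolvable", consumer );
    }
}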
diff --git a/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericModelConsumerTest.xml b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericModelConsumerTest.xml
new file mode 100644 (file)
index 0000000..2ded1b6
--- /dev/null
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+      <role-hint>mock-model</role-hint>
+      <implementation>org.apache.maven.archiva.discoverer.consumers.MockModelConsumer</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+        </requirement>
+      </requirements>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
diff --git a/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumerTest.xml b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumerTest.xml
new file mode 100644 (file)
index 0000000..da9864d
--- /dev/null
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+      <role-hint>mock-metadata</role-hint>
+      <implementation>org.apache.maven.archiva.discoverer.consumers.MockRepositoryMetadataConsumer</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+        </requirement>
+      </requirements>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
index d414a9273c1992f8e73ed4e5ac1b6336bdaa3017..2893e289cf48566ff5bef7fefdd8b1ff8bd5cd49 100644 (file)
           </description>\r
         </field>\r
         <field>\r
-          <name>indexerCronExpression</name>\r
+          <name>dataRefreshCronExpression</name>\r
           <version>1.0.0</version>\r
           <type>String</type>\r
-          <description>When to run the indexing mechanism. Default is every 0 and 30 mins of any hour.</description>\r
+          <description>When to run the data refresh task. Default is every 30 minutes (at minute 0 and 30 of every hour).</description>\r
           <defaultValue>0 0,30 * * * ?</defaultValue>\r
-        </field>\r
+        </field>
+        <!-- 
         <field>\r
           <name>globalBlackListPatterns</name>\r
           <version>1.0.0</version>\r
             <type>String</type>\r
             <multiplicity>*</multiplicity>\r
           </association>\r
-        </field>\r
+        </field>
+           -->\r
         <field>\r
           <name>proxy</name>\r
           <version>1.0.0</version>\r
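The dataRefreshCronExpression default introduced above uses a Quartz-style cron string: six fields starting with seconds, with "?" marking the unused day-of-week field. Purely as an illustrative sketch (it assumes the org.quartz.CronExpression class is available, which is not shown in this diff), the default value can be decoded and exercised like this:

// Sketch: decoding the default "0 0,30 * * * ?".
// Fields: second=0, minute=0 or 30, every hour, every day-of-month, every month, day-of-week unspecified.
import java.util.Date;
import org.quartz.CronExpression;

public class CronDefaultSketch
{
    public static void main( String[] args )
        throws Exception
    {
        CronExpression cron = new CronExpression( "0 0,30 * * * ?" );
        // The next data refresh after "now" falls on the next :00 or :30 minute boundary.
        Date next = cron.getNextValidTimeAfter( new Date() );
        System.out.println( "next refresh: " + next );
    }
}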
index 94a2e31662ae558c51c94bb56985711d209f4e9e..5164c561dddab4970f918c373b092f4db631e4b0 100644 (file)
@@ -43,7 +43,7 @@ public class ArchivaConfigurationTest
         // check default configuration
         assertNotNull( "check configuration returned", configuration );
         assertEquals( "check configuration has default elements", "0 0,30 * * * ?",
-                      configuration.getIndexerCronExpression() );
+                      configuration.getDataRefreshCronExpression() );
         assertNull( "check configuration has default elements", configuration.getIndexPath() );
         assertTrue( "check configuration has default elements", configuration.getRepositories().isEmpty() );
     }
index d123cdcf99bcb284f46c0321f2026c8bfa4f747d..49bde7bd234340856878f26cc7b2d12b49375c4a 100644 (file)
   <artifactId>archiva-converter</artifactId>
   <name>Archiva Repository Converter</name>
   <dependencies>
+    <dependency>
+      <groupId>org.apache.maven.archiva</groupId>
+      <artifactId>archiva-discoverer</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
       <artifactId>plexus-utils</artifactId>
@@ -46,8 +50,8 @@
       <artifactId>maven-model-converter</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-reports-standard</artifactId>
+      <groupId>org.codehaus.plexus</groupId>
+      <artifactId>plexus-digest</artifactId>
     </dependency>
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
     </dependency>
+    <!-- TEST DEPS -->
+    <dependency>
+      <groupId>hsqldb</groupId>
+      <artifactId>hsqldb</artifactId>
+      <version>1.7.3.3</version>
+      <scope>test</scope>
+    </dependency>
     <!-- Needed for PlexusTestCase -->
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionEvent.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionEvent.java
new file mode 100644 (file)
index 0000000..bd5ef58
--- /dev/null
@@ -0,0 +1,128 @@
+package org.apache.maven.archiva.converter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+/**
+ * ConversionEvent 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ConversionEvent
+{
+    public static final int STARTED = 0;
+
+    public static final int PROCESSED = 1;
+
+    public static final int WARNING = 2;
+
+    public static final int ERROR = 3;
+
+    public static final int FINISHED = 4;
+
+    private int type;
+
+    private String message;
+
+    private Artifact artifact;
+
+    private ArtifactRepository repository;
+
+    private Exception exception;
+
+    public ConversionEvent( ArtifactRepository repository, int type )
+    {
+        this.repository = repository;
+        this.type = type;
+    }
+    
+    public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact )
+    {
+        this( repository, type );
+        this.artifact = artifact;
+    }
+    
+    public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact, String message )
+    {
+        this( repository, type );
+        this.artifact = artifact;
+        this.message = message;
+    }
+    
+    public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact, Exception exception )
+    {
+        this( repository, type );
+        this.artifact = artifact;
+        this.exception = exception;
+    }
+
+    public Artifact getArtifact()
+    {
+        return artifact;
+    }
+
+    public Exception getException()
+    {
+        return exception;
+    }
+
+    public String getMessage()
+    {
+        return message;
+    }
+
+    public ArtifactRepository getRepository()
+    {
+        return repository;
+    }
+
+    /**
+     * <p>
+     * The type of event.
+     * </p>
+     * 
+     * <p>
+     * Can be one of the following ...
+     * </p>
+     * 
+     * <ul>
+     * <li>{@link #STARTED} - the whole repository conversion process has started.
+     *      only seen when using the whole repository conversion technique with the
+     *      {@link LegacyRepositoryConverter#convertLegacyRepository(java.io.File, java.io.File, java.util.List, boolean)} 
+     *      method.</li>
+     * <li>{@link #PROCESSED} - a specific artifact has been processed.</li>
+     * <li>{@link #WARNING} - a warning has been detected for a specific artifact during the conversion process.</li>
+     * <li>{@link #ERROR} - an error in the processing of an artifact has been detected.</li>
+     * <li>{@link #FINISHED} - the whole repository conversion process has finished.
+     *      only seen when using the whole repository conversion technique with the
+     *      {@link LegacyRepositoryConverter#convertLegacyRepository(java.io.File, java.io.File, java.util.List, boolean)} 
+     *      method.</li>
+     * </ul>
+     * @return the type of event
+     */
+    public int getType()
+    {
+        return type;
+    }
+}
diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionListener.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionListener.java
new file mode 100644 (file)
index 0000000..3193bd4
--- /dev/null
@@ -0,0 +1,31 @@
+package org.apache.maven.archiva.converter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ConversionListener 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public interface ConversionListener
+{
+    public void conversionEvent( ConversionEvent event );
+}
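ConversionEvent and ConversionListener replace the ReportingDatabase hook the converter previously relied on; the DefaultRepositoryConverter changes further down dispatch these events from addWarning and addFailureWithReason via triggerConversionEvent. As a minimal sketch using nothing beyond the API added above (how listeners are registered with the converter is not shown in this section, so registration is omitted), a listener might simply tally warnings and errors:

// Sketch: a hypothetical listener built against the ConversionEvent/ConversionListener API above.
package org.apache.maven.archiva.converter;

public class CountingConversionListener
    implements ConversionListener
{
    private int warnings;

    private int errors;

    public void conversionEvent( ConversionEvent event )
    {
        switch ( event.getType() )
        {
            case ConversionEvent.WARNING:
                warnings++;
                break;
            case ConversionEvent.ERROR:
                errors++;
                break;
            default:
                // STARTED, PROCESSED and FINISHED are ignored in this sketch.
                break;
        }
    }

    public int getWarnings()
    {
        return warnings;
    }

    public int getErrors()
    {
        return errors;
    }
}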
index c723a11aba0f700dcef1c8762a2676e712366e8a..5b03d6f48717e9a5037c6435f12e6a159fa3e9d1 100644 (file)
@@ -22,7 +22,6 @@ package org.apache.maven.archiva.converter;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.maven.archiva.converter.transaction.FileTransaction;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
@@ -38,7 +37,6 @@ import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
 import org.apache.maven.model.DistributionManagement;
 import org.apache.maven.model.Model;
 import org.apache.maven.model.Relocation;
-import org.apache.maven.model.converter.ArtifactPomRewriter;
 import org.apache.maven.model.converter.ModelConverter;
 import org.apache.maven.model.converter.PomTranslationException;
 import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
@@ -46,6 +44,7 @@ import org.apache.maven.model.v3_0_0.io.xpp3.MavenXpp3Reader;
 import org.codehaus.plexus.digest.Digester;
 import org.codehaus.plexus.digest.DigesterException;
 import org.codehaus.plexus.i18n.I18N;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
 import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
 
 import java.io.File;
@@ -54,6 +53,7 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.io.StringReader;
 import java.io.StringWriter;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
@@ -67,6 +67,7 @@ import java.util.regex.Matcher;
  * @plexus.component role="org.apache.maven.archiva.converter.RepositoryConverter" role-hint="default"
  */
 public class DefaultRepositoryConverter
+    extends AbstractLogEnabled
     implements RepositoryConverter
 {
     /**
@@ -81,11 +82,6 @@ public class DefaultRepositoryConverter
      */
     private ArtifactFactory artifactFactory;
 
-    /**
-     * @plexus.requirement
-     */
-    private ArtifactPomRewriter rewriter;
-
     /**
      * @plexus.requirement
      */
@@ -111,7 +107,9 @@ public class DefaultRepositoryConverter
      */
     private I18N i18n;
 
-    public void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+    private List listeners = new ArrayList();
+
+    public void convert( Artifact artifact, ArtifactRepository targetRepository )
         throws RepositoryConversionException
     {
         if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
@@ -119,20 +117,19 @@ public class DefaultRepositoryConverter
             throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) );
         }
 
-        if ( validateMetadata( artifact, reporter ) )
+        if ( validateMetadata( artifact ) )
         {
             FileTransaction transaction = new FileTransaction();
 
-            if ( copyPom( artifact, targetRepository, reporter, transaction ) )
+            if ( copyPom( artifact, targetRepository, transaction ) )
             {
-                if ( copyArtifact( artifact, targetRepository, reporter, transaction ) )
+                if ( copyArtifact( artifact, targetRepository, transaction ) )
                 {
                     Metadata metadata = createBaseMetadata( artifact );
                     Versioning versioning = new Versioning();
                     versioning.addVersion( artifact.getBaseVersion() );
                     metadata.setVersioning( versioning );
-                    updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata,
-                                    transaction );
+                    updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata, transaction );
 
                     metadata = createBaseMetadata( artifact );
                     metadata.setVersion( artifact.getBaseVersion() );
@@ -173,8 +170,8 @@ public class DefaultRepositoryConverter
                                  Metadata newMetadata, FileTransaction transaction )
         throws RepositoryConversionException
     {
-        File file = new File( targetRepository.getBasedir(),
-                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File file = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
 
         Metadata metadata;
         boolean changed;
@@ -244,7 +241,7 @@ public class DefaultRepositoryConverter
         return metadata;
     }
 
-    private boolean validateMetadata( Artifact artifact, ReportingDatabase reporter )
+    private boolean validateMetadata( Artifact artifact )
         throws RepositoryConversionException
     {
         ArtifactRepository repository = artifact.getRepository();
@@ -252,12 +249,11 @@ public class DefaultRepositoryConverter
         boolean result = true;
 
         RepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact );
-        File file =
-            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
+        File file = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
         if ( file.exists() )
         {
             Metadata metadata = readMetadata( file );
-            result = validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+            result = validateMetadata( metadata, repositoryMetadata, artifact );
         }
 
         repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
@@ -265,14 +261,13 @@ public class DefaultRepositoryConverter
         if ( file.exists() )
         {
             Metadata metadata = readMetadata( file );
-            result = result && validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+            result = result && validateMetadata( metadata, repositoryMetadata, artifact );
         }
 
         return result;
     }
 
-    private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
-                                      ReportingDatabase reporter )
+    private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact )
     {
         String groupIdKey;
         String artifactIdKey = null;
@@ -302,14 +297,14 @@ public class DefaultRepositoryConverter
 
         if ( metadata.getGroupId() == null || !metadata.getGroupId().equals( artifact.getGroupId() ) )
         {
-            addFailure( reporter, artifact, groupIdKey );
+            addFailure( artifact, groupIdKey );
             result = false;
         }
         if ( !repositoryMetadata.storedInGroupDirectory() )
         {
             if ( metadata.getGroupId() == null || !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
             {
-                addFailure( reporter, artifact, artifactIdKey );
+                addFailure( artifact, artifactIdKey );
                 result = false;
             }
             if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
@@ -319,8 +314,7 @@ public class DefaultRepositoryConverter
                 boolean foundVersion = false;
                 if ( metadata.getVersioning() != null )
                 {
-                    for ( Iterator i = metadata.getVersioning().getVersions().iterator();
-                          i.hasNext() && !foundVersion; )
+                    for ( Iterator i = metadata.getVersioning().getVersions().iterator(); i.hasNext() && !foundVersion; )
                     {
                         String version = (String) i.next();
                         if ( version.equals( artifact.getBaseVersion() ) )
@@ -332,7 +326,7 @@ public class DefaultRepositoryConverter
 
                 if ( !foundVersion )
                 {
-                    addFailure( reporter, artifact, versionsKey );
+                    addFailure( artifact, versionsKey );
                     result = false;
                 }
             }
@@ -341,7 +335,7 @@ public class DefaultRepositoryConverter
                 // snapshot metadata
                 if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
                 {
-                    addFailure( reporter, artifact, versionKey );
+                    addFailure( artifact, versionKey );
                     result = false;
                 }
 
@@ -364,7 +358,7 @@ public class DefaultRepositoryConverter
 
                         if ( !correct )
                         {
-                            addFailure( reporter, artifact, snapshotKey );
+                            addFailure( artifact, snapshotKey );
                             result = false;
                         }
                     }
@@ -374,30 +368,30 @@ public class DefaultRepositoryConverter
         return result;
     }
 
-    private void addFailure( ReportingDatabase reporter, Artifact artifact, String key )
+    private void addFailure( Artifact artifact, String key )
     {
-        addFailureWithReason( reporter, artifact, getI18NString( key ) );
-
+        addFailureWithReason( artifact, getI18NString( key ) );
     }
 
-    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String message )
+    private void addWarning( Artifact artifact, String message )
     {
         // TODO: should we be able to identify/fix these?
-        reporter.addWarning( artifact, null, null, message );
+        // TODO: write archiva-artifact-repair module
+        triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.WARNING, artifact,
+                                                     message ) );
     }
 
-    private static void addFailureWithReason( ReportingDatabase reporter, Artifact artifact, String reason )
+    private void addFailureWithReason( Artifact artifact, String reason )
     {
         // TODO: should we be able to identify/fix these?
-        reporter.addFailure( artifact, null, null, reason );
+        triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.ERROR, artifact, reason ) );
     }
 
-    private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
-                             FileTransaction transaction )
+    private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
         throws RepositoryConversionException
     {
-        Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(),
-                                                              artifact.getVersion() );
+        Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact
+            .getVersion() );
         pom.setBaseVersion( artifact.getBaseVersion() );
         ArtifactRepository repository = artifact.getRepository();
         File file = new File( repository.getBasedir(), repository.pathOf( pom ) );
@@ -411,7 +405,7 @@ public class DefaultRepositoryConverter
             boolean checksumsValid = false;
             try
             {
-                if ( testChecksums( artifact, file, reporter ) )
+                if ( testChecksums( artifact, file ) )
                 {
                     checksumsValid = true;
                 }
@@ -457,16 +451,15 @@ public class DefaultRepositoryConverter
 
                     if ( doRelocation( artifact, v3Model, targetRepository, transaction ) )
                     {
-                        Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(),
-                                                                                       artifact.getArtifactId(),
-                                                                                       artifact.getVersion() );
+                        Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact
+                            .getArtifactId(), artifact.getVersion() );
                         targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( relocatedPom ) );
                     }
 
                     Model v4Model = translator.translate( v3Model );
 
-                    translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(),
-                                                 v3Model.getVersion(), v3Model.getPackage() );
+                    translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(), v3Model
+                        .getVersion(), v3Model.getPackage() );
 
                     writer = new StringWriter();
                     MavenXpp3Writer Xpp3Writer = new MavenXpp3Writer();
@@ -479,13 +472,12 @@ public class DefaultRepositoryConverter
                     for ( Iterator i = warnings.iterator(); i.hasNext(); )
                     {
                         String message = (String) i.next();
-                        addWarning( reporter, artifact, message );
+                        addWarning( artifact, message );
                     }
                 }
                 catch ( XmlPullParserException e )
                 {
-                    addFailureWithReason( reporter, artifact,
-                                          getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+                    addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
                     result = false;
                 }
                 catch ( IOException e )
@@ -494,8 +486,7 @@ public class DefaultRepositoryConverter
                 }
                 catch ( PomTranslationException e )
                 {
-                    addFailureWithReason( reporter, artifact,
-                                          getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+                    addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
                     result = false;
                 }
                 finally
@@ -506,7 +497,7 @@ public class DefaultRepositoryConverter
         }
         else
         {
-            addWarning( reporter, artifact, getI18NString( "warning.missing.pom" ) );
+            addWarning( artifact, getI18NString( "warning.missing.pom" ) );
         }
         return result;
     }
@@ -516,8 +507,8 @@ public class DefaultRepositoryConverter
         throws IOException
     {
         Properties properties = v3Model.getProperties();
-        if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" ) ||
-            properties.containsKey( "relocated.version" ) )
+        if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" )
+            || properties.containsKey( "relocated.version" ) )
         {
             String newGroupId = properties.getProperty( "relocated.groupId", v3Model.getGroupId() );
             properties.remove( "relocated.groupId" );
@@ -600,7 +591,7 @@ public class DefaultRepositoryConverter
         return i18n.getString( getClass().getName(), Locale.getDefault(), key );
     }
 
-    private boolean testChecksums( Artifact artifact, File file, ReportingDatabase reporter )
+    private boolean testChecksums( Artifact artifact, File file )
         throws IOException
     {
         boolean result = true;
@@ -609,7 +600,7 @@ public class DefaultRepositoryConverter
         {
             Digester digester = (Digester) it.next();
             result &= verifyChecksum( file, file.getName() + "." + getDigesterFileExtension( digester ), digester,
-                                      reporter, artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
+                                      artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
         }
         return result;
     }
@@ -623,8 +614,7 @@ public class DefaultRepositoryConverter
         return digester.getAlgorithm().toLowerCase().replaceAll( "-", "" );
     }
 
-    private boolean verifyChecksum( File file, String fileName, Digester digester, ReportingDatabase reporter,
-                                    Artifact artifact, String key )
+    private boolean verifyChecksum( File file, String fileName, Digester digester, Artifact artifact, String key )
         throws IOException
     {
         boolean result = true;
@@ -639,15 +629,14 @@ public class DefaultRepositoryConverter
             }
             catch ( DigesterException e )
             {
-                addFailure( reporter, artifact, key );
+                addFailure( artifact, key );
                 result = false;
             }
         }
         return result;
     }
 
-    private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
-                                  FileTransaction transaction )
+    private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
         throws RepositoryConversionException
     {
         File sourceFile = artifact.getFile();
@@ -668,7 +657,7 @@ public class DefaultRepositoryConverter
                 matching = FileUtils.contentEquals( sourceFile, targetFile );
                 if ( !matching )
                 {
-                    addFailure( reporter, artifact, "failure.target.already.exists" );
+                    addFailure( artifact, "failure.target.already.exists" );
                     result = false;
                 }
             }
@@ -676,7 +665,7 @@ public class DefaultRepositoryConverter
             {
                 if ( force || !matching )
                 {
-                    if ( testChecksums( artifact, sourceFile, reporter ) )
+                    if ( testChecksums( artifact, sourceFile ) )
                     {
                         transaction.copyFile( sourceFile, targetFile, digesters );
                     }
@@ -694,7 +683,7 @@ public class DefaultRepositoryConverter
         return result;
     }
 
-    public void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+    public void convert( List artifacts, ArtifactRepository targetRepository )
         throws RepositoryConversionException
     {
         for ( Iterator i = artifacts.iterator(); i.hasNext(); )
@@ -703,20 +692,49 @@ public class DefaultRepositoryConverter
 
             try
             {
-                convert( artifact, targetRepository, reporter );
+                convert( artifact, targetRepository );
             }
             catch ( RepositoryConversionException e )
             {
-                // Need to add:
-                // artifact
-                // processor
-                // problem
-                // reason
-                //TODO: this doesn't really provide any real facility for a decent error message, having
-                // the stack trace would be useful. I also have no idea what a processor is currently or
-                // how to get hold of it here.
-
-                reporter.addFailure( artifact, "", e.getLocalizedMessage(), e.getCause().getLocalizedMessage() );
+                triggerConversionEvent( new ConversionEvent( targetRepository, ConversionEvent.ERROR, artifact, e ) );
+            }
+        }
+    }
+
+    /**
+     * Add a listener to the conversion process.
+     * 
+     * @param listener the listener to add.
+     */
+    public void addConversionListener( ConversionListener listener )
+    {
+        listeners.add( listener );
+    }
+
+    /**
+     * Remove a listener from the conversion process.
+     * 
+     * @param listener the listener to remove.
+     */
+    public void removeConversionListener( ConversionListener listener )
+    {
+        listeners.remove( listener );
+    }
+
+    private void triggerConversionEvent( ConversionEvent event )
+    {
+        Iterator it = listeners.iterator();
+        while ( it.hasNext() )
+        {
+            ConversionListener listener = (ConversionListener) it.next();
+
+            try
+            {
+                listener.conversionEvent( event );
+            }
+            catch ( Throwable t )
+            {
+                getLogger().warn( "ConversionEvent resulted in exception from listener: " + t.getMessage(), t );
             }
         }
     }
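
The dispatcher above guards each callback: a Throwable thrown by a listener is caught and logged, so a misbehaving listener cannot abort the conversion. For orientation, a minimal sketch of a listener implementation follows; it assumes ConversionListener declares only the conversionEvent callback (as MockConversionListener below suggests) and is illustrative, not part of this commit.

package org.apache.maven.archiva.converter;

/**
 * Sketch only: a trivial listener that echoes warnings and errors to stderr.
 * Assumes ConversionListener declares just conversionEvent( ConversionEvent ).
 */
public class LoggingConversionListener
    implements ConversionListener
{
    public void conversionEvent( ConversionEvent event )
    {
        if ( event.getType() == ConversionEvent.ERROR || event.getType() == ConversionEvent.WARNING )
        {
            // getMessage() may be null when only an exception was attached to the event.
            System.err.println( ( event.getType() == ConversionEvent.ERROR ? "[ERROR] " : "[WARNING] " )
                + event.getArtifact() + ": " + event.getMessage() );
        }
    }
}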
index d47d5b1f9ccd3bece021d775f4c2e0927a7e830e..37c21fcce911fa119588b1d18be1099a1a63472f 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.converter;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 
@@ -39,9 +38,8 @@ public interface RepositoryConverter
      *
      * @param artifact         the artifact to convert
      * @param targetRepository the target repository
-     * @param reporter         reporter to track the results of the conversion
      */
-    void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+    void convert( Artifact artifact, ArtifactRepository targetRepository )
         throws RepositoryConversionException;
 
     /**
@@ -49,8 +47,21 @@ public interface RepositoryConverter
      *
      * @param artifacts        the set of artifacts to convert
      * @param targetRepository the target repository
-     * @param reporter         reporter to track the results of the conversions
      */
-    void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+    void convert( List artifacts, ArtifactRepository targetRepository )
         throws RepositoryConversionException;
+    
+    /**
+     * Add a listener to the conversion process.
+     * 
+     * @param listener the listener to add.
+     */
+    void addConversionListener( ConversionListener listener );
+    
+    /**
+     * Remove a listener from the conversion process.
+     * 
+     * @param listener the listener to remove.
+     */
+    void removeConversionListener( ConversionListener listener );
 }
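
Taken together with the listener methods above, typical usage of the reworked RepositoryConverter looks roughly like the sketch below; it assumes a PlexusTestCase-style environment where lookup() is available and where artifact and targetRepository are prepared as in RepositoryConverterTest further down.

// Sketch only: lookup(), artifact and targetRepository are assumed to exist as in the tests below.
RepositoryConverter converter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "default" );

MockConversionListener listener = new MockConversionListener();
converter.addConversionListener( listener );

converter.convert( artifact, targetRepository );

// Results are no longer written to a ReportingDatabase; the listener collects warnings
// and errors keyed by groupId:artifactId:version:type:classifier.
assertEquals( "check no errors", 0, listener.getErrorMessageCount() );
assertEquals( "check no warnings", 0, listener.getWarningMessageCount() );

converter.removeConversionListener( listener );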
diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java
new file mode 100644 (file)
index 0000000..469cc33
--- /dev/null
@@ -0,0 +1,124 @@
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.ConversionListener;
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Jason van Zyl
+ * @plexus.component
+ * @todo turn this into a general conversion component and hide all this crap here.
+ * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
+ */
+public class DefaultLegacyRepositoryConverter
+    implements LegacyRepositoryConverter
+{
+    /**
+     * @plexus.requirement role-hint="legacy"
+     */
+    private ArtifactRepositoryLayout legacyLayout;
+
+    /**
+     * @plexus.requirement role-hint="default"
+     */
+    private ArtifactRepositoryLayout defaultLayout;
+
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactRepositoryFactory artifactRepositoryFactory;
+
+    /**
+     * @plexus.requirement role-hint="default"
+     */
+    private Discoverer discoverer;
+
+    /**
+     * @plexus.requirement role="org.apache.maven.archiva.common.consumers.Consumer" role-hint="legacy-converter"
+     */
+    private LegacyConverterArtifactConsumer legacyConverterConsumer;
+
+    public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
+                                         List fileExclusionPatterns, boolean includeSnapshots )
+        throws RepositoryConversionException
+    {
+        ArtifactRepository legacyRepository;
+
+        ArtifactRepository repository;
+
+        try
+        {
+            legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy", legacyRepositoryDirectory
+                .toURI().toURL().toString(), legacyLayout, null, null );
+
+            repository = artifactRepositoryFactory.createArtifactRepository( "default", repositoryDirectory.toURI()
+                .toURL().toString(), defaultLayout, null, null );
+        }
+        catch ( MalformedURLException e )
+        {
+            throw new RepositoryConversionException( "Error converting legacy repository.", e );
+        }
+
+        try
+        {
+            List consumers = new ArrayList();
+            legacyConverterConsumer.setDestinationRepository( repository );
+            consumers.add( legacyConverterConsumer );
+
+            discoverer.walkRepository( legacyRepository, consumers, includeSnapshots );
+        }
+        catch ( DiscovererException e )
+        {
+            throw new RepositoryConversionException( "Unable to convert repository due to discoverer error: "
+                + e.getMessage(), e );
+        }
+    }
+
+    /**
+     * Add a listener to the conversion process.
+     * 
+     * @param listener the listener to add.
+     */
+    public void addConversionListener( ConversionListener listener )
+    {
+        legacyConverterConsumer.addConversionListener( listener );
+    }
+
+    /**
+     * Remove a listener from the conversion process.
+     * 
+     * @param listener the listener to remove.
+     */
+    public void removeConversionListener( ConversionListener listener )
+    {
+        legacyConverterConsumer.removeConversionListener( listener );
+    }
+}
diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java
new file mode 100644 (file)
index 0000000..a5b758c
--- /dev/null
@@ -0,0 +1,105 @@
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.converter.ConversionListener;
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.converter.RepositoryConverter;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+/**
+ * LegacyConverterArtifactConsumer - convert artifacts as they are found
+ * into the destination repository. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer"
+ *     role-hint="legacy-converter"
+ *     instantiation-strategy="per-lookup"
+ */
+public class LegacyConverterArtifactConsumer
+    extends GenericArtifactConsumer
+{
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryConverter repositoryConverter;
+
+    private ArtifactRepository destinationRepository;
+
+    public void processArtifact( Artifact artifact, BaseFile file )
+    {
+        try
+        {
+            repositoryConverter.convert( artifact, destinationRepository );
+        }
+        catch ( RepositoryConversionException e )
+        {
+            getLogger().error(
+                               "Unable to convert artifact " + artifact + " to destination repository "
+                                   + destinationRepository, e );
+        }
+    }
+
+    public void processFileProblem( BaseFile path, String message )
+    {
+        getLogger().error( "Artifact Build Failure on " + path + " : " + message );
+        // TODO: report this to the ConversionListener?
+    }
+
+    public ArtifactRepository getDestinationRepository()
+    {
+        return destinationRepository;
+    }
+
+    public void setDestinationRepository( ArtifactRepository destinationRepository )
+    {
+        this.destinationRepository = destinationRepository;
+    }
+    
+    public String getName()
+    {
+        return "Legacy Artifact Converter Consumer";
+    }
+    
+    /**
+     * Add a listener to the conversion process.
+     * 
+     * @param listener the listener to add.
+     */
+    public void addConversionListener( ConversionListener listener )
+    {
+        repositoryConverter.addConversionListener( listener );
+    }
+
+    /**
+     * Remove a listener from the conversion process.
+     * 
+     * @param listener the listener to remove.
+     */
+    public void removeConversionListener( ConversionListener listener )
+    {
+        repositoryConverter.removeConversionListener( listener );
+    }    
+}
diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java
new file mode 100644 (file)
index 0000000..60d3d55
--- /dev/null
@@ -0,0 +1,64 @@
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.ConversionListener;
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * Convert an entire repository.
+ * 
+ * @author Jason van Zyl
+ */
+public interface LegacyRepositoryConverter
+{
+    String ROLE = LegacyRepositoryConverter.class.getName();
+
+    /**
+     * Convert a legacy repository to a modern repository. This means converting a Maven 1.x
+     * repository using v3 POMs into a Maven 2.x repository using v4.0.0 POMs.
+     *
+     * @param legacyRepositoryDirectory the directory of the legacy repository. 
+     * @param destinationRepositoryDirectory the directory of the modern repository.
+     * @param fileExclusionPatterns the list of patterns to exclude from the conversion.
+     * @param includeSnapshots true to include snapshots in the conversion.
+     * @throws RepositoryConversionException if the repository could not be converted.
+     */
+    void convertLegacyRepository( File legacyRepositoryDirectory, File destinationRepositoryDirectory,
+                                  List fileExclusionPatterns, boolean includeSnapshots )
+        throws RepositoryConversionException;
+    
+    /**
+     * Add a listener to the conversion process.
+     * 
+     * @param listener the listener to add.
+     */
+    void addConversionListener( ConversionListener listener );
+    
+    /**
+     * Remove a listener from the conversion process.
+     * 
+     * @param listener the listener to remove.
+     */
+    void removeConversionListener( ConversionListener listener );
+}
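
A rough sketch of driving this whole-repository entry point follows; the lookup environment and the directory locations are assumptions for illustration, not part of this commit.

// Sketch only: assumes a Plexus lookup environment; the directories are illustrative.
LegacyRepositoryConverter legacyConverter =
    (LegacyRepositoryConverter) lookup( LegacyRepositoryConverter.ROLE );

MockConversionListener listener = new MockConversionListener();
legacyConverter.addConversionListener( listener );

// Walk the Maven 1.x layout repository and write converted artifacts (v4.0.0 POMs,
// default layout) into the destination directory, excluding nothing and skipping snapshots.
legacyConverter.convertLegacyRepository( new File( "/path/to/legacy-repository" ),
                                         new File( "/path/to/default-repository" ),
                                         java.util.Collections.EMPTY_LIST,
                                         false );

legacyConverter.removeConversionListener( listener );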
diff --git a/archiva-converter/src/test/java/org/apache/maven/archiva/converter/AllTests.java b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/AllTests.java
new file mode 100644 (file)
index 0000000..dbd54f6
--- /dev/null
@@ -0,0 +1,44 @@
+package org.apache.maven.archiva.converter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.converter" );
+        //$JUnit-BEGIN$
+        suite.addTest( org.apache.maven.archiva.converter.transaction.AllTests.suite() );
+        suite.addTestSuite( RepositoryConverterTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-converter/src/test/java/org/apache/maven/archiva/converter/MockConversionListener.java b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/MockConversionListener.java
new file mode 100644 (file)
index 0000000..92b7006
--- /dev/null
@@ -0,0 +1,163 @@
+package org.apache.maven.archiva.converter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * MockConversionListener 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MockConversionListener
+    implements ConversionListener
+{
+    private Map warnings = new HashMap();
+
+    private Map errors = new HashMap();
+
+    private Map exceptions = new HashMap();
+
+    private List processed = new ArrayList();
+
+    private List repositories = new ArrayList();
+
+    public void conversionEvent( ConversionEvent event )
+    {
+        switch ( event.getType() )
+        {
+            case ConversionEvent.STARTED:
+                addUnique( repositories, event.getRepository() );
+                break;
+            case ConversionEvent.PROCESSED:
+                addUnique( processed, event.getArtifact() );
+                break;
+            case ConversionEvent.WARNING:
+                if ( event.getException() != null )
+                {
+                    addObjectList( exceptions, toKey( event.getArtifact() ), event.getException() );
+                }
+
+                if ( event.getMessage() != null )
+                {
+                    addObjectList( warnings, toKey( event.getArtifact() ), event.getMessage() );
+                }
+                break;
+            case ConversionEvent.ERROR:
+                if ( event.getException() != null )
+                {
+                    addObjectList( exceptions, toKey( event.getArtifact() ), event.getException() );
+                }
+
+                if ( event.getMessage() != null )
+                {
+                    addObjectList( errors, toKey( event.getArtifact() ), event.getMessage() );
+                }
+                break;
+            case ConversionEvent.FINISHED:
+                addUnique( repositories, event.getRepository() );
+                break;
+        }
+    }
+
+    public String toKey( Artifact artifact )
+    {
+        return StringUtils.defaultString( artifact.getGroupId() ) + ":"
+            + StringUtils.defaultString( artifact.getArtifactId() ) + ":"
+            + StringUtils.defaultString( artifact.getVersion() ) + ":" + StringUtils.defaultString( artifact.getType() )
+            + ":" + StringUtils.defaultString( artifact.getClassifier() );
+    }
+
+    private void addObjectList( Map map, String key, Object value )
+    {
+        List objlist = (List) map.get( key );
+        if ( objlist == null )
+        {
+            objlist = new ArrayList();
+        }
+
+        objlist.add( value );
+
+        map.put( key, objlist );
+    }
+
+    private void addUnique( Collection collection, Object obj )
+    {
+        if ( !collection.contains( obj ) )
+        {
+            collection.add( obj );
+        }
+    }
+
+    public Map getErrors()
+    {
+        return errors;
+    }
+
+    public Map getExceptions()
+    {
+        return exceptions;
+    }
+
+    public List getProcessed()
+    {
+        return processed;
+    }
+
+    public List getRepositories()
+    {
+        return repositories;
+    }
+
+    public Map getWarnings()
+    {
+        return warnings;
+    }
+
+    private int getObjectListCount( Map map )
+    {
+        int count = 0;
+        for ( Iterator it = map.values().iterator(); it.hasNext(); )
+        {
+            List objList = (List) it.next();
+            count += objList.size();
+        }
+        return count;
+    }
+
+    public int getWarningMessageCount()
+    {
+        return getObjectListCount( warnings );
+    }
+
+    public int getErrorMessageCount()
+    {
+        return getObjectListCount( errors );
+    }
+}
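
The updated RepositoryConverterTest below relies on helpers such as checkCounts, checkSuccess, checkFailure and assertHasErrorReason that fall outside this excerpt; a plausible reading of them, built only on the MockConversionListener API above, is sketched here (the actual implementations in the commit may differ).

// Hedged sketch: the helpers shipped with RepositoryConverterTest may differ in detail.
private void checkCounts( MockConversionListener listener, int errors, int warnings )
{
    assertEquals( "check error count", errors, listener.getErrorMessageCount() );
    assertEquals( "check warning count", warnings, listener.getWarningMessageCount() );
}

private void checkSuccess( MockConversionListener listener )
{
    checkCounts( listener, 0, 0 );
}

private void checkFailure( MockConversionListener listener )
{
    assertEquals( "check there is a failure", 1, listener.getErrorMessageCount() );
}

private void assertHasErrorReason( MockConversionListener listener, String reason )
{
    // Error messages are stored per artifact key; search every list for the expected reason.
    for ( Iterator it = listener.getErrors().values().iterator(); it.hasNext(); )
    {
        List messages = (List) it.next();
        if ( messages.contains( reason ) )
        {
            return;
        }
    }
    fail( "Expected error reason not reported: " + reason );
}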
index 15d5f187c554db10a3feff17d9699c6e00b6d5e5..8e0c676e36e6e7071678d6364bb7c1cbe51d340d 100644 (file)
@@ -20,10 +20,6 @@ package org.apache.maven.archiva.converter;
  */
 
 import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.metadata.ArtifactMetadata;
@@ -42,6 +38,8 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
 import java.util.regex.Matcher;
 
 /**
@@ -64,8 +62,6 @@ public class RepositoryConverterTest
 
     private ArtifactFactory artifactFactory;
 
-    private ReportingDatabase reportingDatabase;
-
     private static final int SLEEP_MILLIS = 100;
 
     private I18N i18n;
@@ -80,25 +76,28 @@ public class RepositoryConverterTest
         ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
 
         File sourceBase = getTestFile( "src/test/source-repository" );
-        sourceRepository =
-            factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
+        sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null,
+                                                             null );
 
         layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
 
         File targetBase = getTestFile( "target/test-target-repository" );
         copyDirectoryStructure( getTestFile( "src/test/target-repository" ), targetBase );
 
-        targetRepository =
-            factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null, null );
+        targetRepository = factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null,
+                                                             null );
 
         repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "default" );
 
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
 
         i18n = (I18N) lookup( I18N.ROLE );
+    }
 
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
+    protected void tearDown()
+        throws Exception
+    {
+        super.tearDown();
     }
 
     private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
@@ -135,8 +134,8 @@ public class RepositoryConverterTest
                 {
                     if ( !destination.exists() && !destination.mkdirs() )
                     {
-                        throw new IOException(
-                            "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
+                        throw new IOException( "Could not create destination directory '"
+                            + destination.getAbsolutePath() + "'." );
                     }
                     copyDirectoryStructure( file, destination );
                 }
@@ -155,20 +154,23 @@ public class RepositoryConverterTest
 
         Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0" );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         artifactMetadataFile.delete();
 
         ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File versionMetadataFile = new File( targetRepository.getBasedir(),
-                                             targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+        File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( versionMetadata ) );
         versionMetadataFile.delete();
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         artifactFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkSuccess( listener );
 
         assertTrue( "Check artifact created", artifactFile.exists() );
         assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
@@ -200,17 +202,20 @@ public class RepositoryConverterTest
 
         Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0" );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         artifactMetadataFile.delete();
 
         ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File versionMetadataFile = new File( targetRepository.getBasedir(),
-                                             targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+        File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( versionMetadata ) );
         versionMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkSuccess( listener );
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         assertTrue( "Check artifact created", artifactFile.exists() );
@@ -241,22 +246,21 @@ public class RepositoryConverterTest
     {
         Artifact artifact = createArtifact( "test", "relocated-v3artifact", "1.0.0" );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         artifactMetadataFile.delete();
 
         ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File versionMetadataFile = new File( targetRepository.getBasedir(),
-                                             targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+        File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( versionMetadata ) );
         versionMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        repositoryConverter.convert( artifact, targetRepository );
         //checkSuccess();  --> commented until MNG-2100 is fixed
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         assertTrue( "Check if relocated artifact created", artifactFile.exists() );
-        assertTrue( "Check if relocated artifact matches",
-                    FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+        assertTrue( "Check if relocated artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
         Artifact pomArtifact = createArtifact( "relocated-test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
         File pomFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( pomArtifact ) );
         File testFile = getTestFile( "target/test-target-repository/" + targetRepository.pathOf( pomArtifact ) );
@@ -276,19 +280,20 @@ public class RepositoryConverterTest
 
         Artifact artifact = createArtifact( "test", "v3-warnings-artifact", "1.0.0" );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         artifactMetadataFile.delete();
 
         ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File versionMetadataFile = new File( targetRepository.getBasedir(),
-                                             targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+        File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( versionMetadata ) );
         versionMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "check number of warnings", 2, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 0, 2 );
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         assertTrue( "Check artifact created", artifactFile.exists() );
@@ -311,17 +316,20 @@ public class RepositoryConverterTest
 
         Artifact artifact = createArtifact( "test", "v4artifact", version );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         artifactMetadataFile.delete();
 
         ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File snapshotMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+        File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
         snapshotMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 0, 0 );
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         assertTrue( "Check artifact created", artifactFile.exists() );
@@ -354,17 +362,20 @@ public class RepositoryConverterTest
 
         Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-SNAPSHOT" );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         artifactMetadataFile.delete();
 
         ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File snapshotMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+        File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
         snapshotMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 0, 0 );
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         assertTrue( "Check artifact created", artifactFile.exists() );
@@ -410,11 +421,10 @@ public class RepositoryConverterTest
     public void testMavenOnePluginConversion()
         throws Exception
     {
-        Artifact artifact =
-            createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0", "maven-plugin" );
-        artifact.setFile(
-            new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+        Artifact artifact = createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0",
+                                            "maven-plugin" );
+        artifact.setFile( new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
+        repositoryConverter.convert( artifact, targetRepository );
         // There is a warning but I can't figure out how to look at it. Eyeballing the results it appears
         // the plugin is being converted correctly.
         //checkSuccess();
@@ -424,14 +434,14 @@ public class RepositoryConverterTest
         assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
 
         /*
-        The POM isn't needed for Maven 1.x plugins but the raw conversion for  
-
-        artifact = createPomArtifact( artifact );
-        File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
-        File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" );
-        assertTrue( "Check POM created", pomFile.exists() );
-        compareFiles( expectedPomFile, pomFile );
-        */
+         The POM isn't needed for Maven 1.x plugins but the raw conversion for  
+
+         artifact = createPomArtifact( artifact );
+         File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+         File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" );
+         assertTrue( "Check POM created", pomFile.exists() );
+         compareFiles( expectedPomFile, pomFile );
+         */
     }
 
     public void testV3TimestampedSnapshotPomConvert()
@@ -441,17 +451,20 @@ public class RepositoryConverterTest
 
         Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-20060105.130101-3" );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         artifactMetadataFile.delete();
 
         ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File snapshotMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+        File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
         snapshotMetadataFile.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 0, 0 );
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         assertTrue( "Check artifact created", artifactFile.exists() );
@@ -483,11 +496,13 @@ public class RepositoryConverterTest
         // test that a POM is not created when there was none at the source
 
         Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" );
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "check warnings", 1, reportingDatabase.getNumWarnings() );
-        assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 0, 1 );
+        
+        assertHasWarningReason( listener, getI18nString( "warning.missing.pom" ) );
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         assertTrue( "Check artifact created", artifactFile.exists() );
@@ -510,15 +525,19 @@ public class RepositoryConverterTest
         File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         file.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkFailure();
-        assertEquals( "check failure message", getI18nString( "failure.incorrect.md5" ), getFailure().getReason() );
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 1, 0 );
+        
+        assertHasErrorReason( listener, getI18nString( "failure.incorrect.md5" ) );
 
         assertFalse( "Check artifact not created", file.exists() );
 
         ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
-        File metadataFile =
-            new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+        File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( metadata ) );
         assertFalse( "Check metadata not created", metadataFile.exists() );
     }
 
@@ -531,15 +550,19 @@ public class RepositoryConverterTest
         File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         file.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkFailure();
-        assertEquals( "check failure message", getI18nString( "failure.incorrect.sha1" ), getFailure().getReason() );
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 1, 0 );
+        
+        assertHasErrorReason( listener, getI18nString( "failure.incorrect.sha1" ) );
 
         assertFalse( "Check artifact not created", file.exists() );
 
         ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
-        File metadataFile =
-            new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+        File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( metadata ) );
         assertFalse( "Check metadata not created", metadataFile.exists() );
     }
 
@@ -568,8 +591,11 @@ public class RepositoryConverterTest
         // Need to guarantee last modified is not equal
         Thread.sleep( SLEEP_MILLIS );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 0, 0 );
 
         compareFiles( sourceFile, targetFile );
         compareFiles( sourcePomFile, targetPomFile );
@@ -604,17 +630,20 @@ public class RepositoryConverterTest
         // Need to guarantee last modified is not equal
         Thread.sleep( SLEEP_MILLIS );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkFailure();
-        assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
-                      getFailure().getReason() );
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 1, 0 );
+
+        assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) );
 
         assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
         assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
 
         ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
-        File metadataFile =
-            new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+        File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( metadata ) );
         assertFalse( "Check metadata not created", metadataFile.exists() );
     }
 
@@ -641,8 +670,11 @@ public class RepositoryConverterTest
         sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() );
         sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 0, 0 );
 
         compareFiles( sourceFile, targetFile );
         compareFiles( sourcePomFile, targetPomFile );
@@ -651,8 +683,8 @@ public class RepositoryConverterTest
         assertFalse( "Check modified", origTime == targetPomFile.lastModified() );
 
         ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
-        File metadataFile =
-            new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+        File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( metadata ) );
         assertTrue( "Check metadata created", metadataFile.exists() );
     }
 
@@ -671,8 +703,11 @@ public class RepositoryConverterTest
         File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 0, 0 );
 
         assertTrue( "Check source file exists", sourceFile.exists() );
         assertTrue( "Check source POM exists", sourcePomFile.exists() );
@@ -681,8 +716,8 @@ public class RepositoryConverterTest
         assertFalse( "Check target POM doesn't exist", targetPomFile.exists() );
 
         ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
-        File metadataFile =
-            new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+        File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( metadata ) );
         assertFalse( "Check metadata not created", metadataFile.exists() );
     }
 
@@ -713,17 +748,20 @@ public class RepositoryConverterTest
         // Need to guarantee last modified is not equal
         Thread.sleep( SLEEP_MILLIS );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkFailure();
-        assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
-                      getFailure().getReason() );
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkFailure( listener );
+        
+        assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) );
 
         assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
         assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
 
         ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
-        File metadataFile =
-            new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+        File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( metadata ) );
         assertFalse( "Check metadata not created", metadataFile.exists() );
     }
 
@@ -734,20 +772,38 @@ public class RepositoryConverterTest
 
         Artifact artifact = createArtifact( "test", "rollback-created-artifact", "1.0.0" );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         FileUtils.deleteDirectory( artifactMetadataFile.getParentFile() );
 
         ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File versionMetadataFile = new File( targetRepository.getBasedir(),
-                                             targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+        File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( versionMetadata ) );
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkFailure();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkCounts( listener, 1, 0 );
+        
+        List messages = (List) listener.getErrors().get( listener.toKey( artifact ));
+        assertNotNull( "Should have error messages.", messages );
+        
+        boolean found = false;
         String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$";
-        assertTrue( "Check failure message", getFailure().getReason().matches( pattern ) );
+        for ( Iterator it = messages.iterator(); it.hasNext(); )
+        {
+            String reason = (String) it.next();
+            if( reason.matches( pattern ) )
+            {
+                found = true;
+                break;
+            }
+        }
+        
+        assertTrue( "Check failure message.", found );
 
         assertFalse( "check artifact rolled back", artifactFile.exists() );
         assertFalse( "check metadata rolled back", artifactMetadataFile.exists() );
@@ -763,10 +819,12 @@ public class RepositoryConverterTest
         artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) );
         artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) );
         artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) );
-        repositoryConverter.convert( artifacts, targetRepository, reportingDatabase );
-        assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifacts, targetRepository );
+        checkCounts( listener, 0, 0 );
 
         for ( Iterator i = artifacts.iterator(); i.hasNext(); )
         {
@@ -778,8 +836,8 @@ public class RepositoryConverterTest
 
             artifact = createPomArtifact( artifact );
             File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
-            File expectedPomFile =
-                getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId() + ".pom" );
+            File expectedPomFile = getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId()
+                + ".pom" );
             assertTrue( "Check POM created", pomFile.exists() );
 
             compareFiles( expectedPomFile, pomFile );
@@ -797,16 +855,19 @@ public class RepositoryConverterTest
         File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         file.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkFailure();
-        assertEquals( "check failure message", getI18nString( "failure.incorrect.artifactMetadata.versions" ),
-                      getFailure().getReason() );
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkFailure( listener );
+        
+        assertHasErrorReason( listener, getI18nString( "failure.incorrect.artifactMetadata.versions" ) );
 
         assertFalse( "Check artifact not created", file.exists() );
 
         ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
-        File metadataFile =
-            new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+        File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( metadata ) );
         assertFalse( "Check metadata not created", metadataFile.exists() );
     }
 
@@ -821,16 +882,19 @@ public class RepositoryConverterTest
         File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         file.delete();
 
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkFailure();
-        assertEquals( "check failure message", getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ),
-                      getFailure().getReason() );
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkFailure( listener );
+        
+        assertHasErrorReason( listener, getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ) );
 
         assertFalse( "Check artifact not created", file.exists() );
 
         ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
-        File metadataFile =
-            new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+        File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( metadata ) );
         assertFalse( "Check metadata not created", metadataFile.exists() );
     }
 
@@ -840,9 +904,11 @@ public class RepositoryConverterTest
         // test artifact level metadata is merged when it already exists on successful conversion
 
         Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" );
-
-        repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
-        checkSuccess();
+        MockConversionListener listener = new MockConversionListener();
+        
+        repositoryConverter.addConversionListener( listener );
+        repositoryConverter.convert( artifact, targetRepository );
+        checkSuccess( listener );
 
         File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
         assertTrue( "Check artifact created", artifactFile.exists() );
@@ -856,8 +922,8 @@ public class RepositoryConverterTest
         compareFiles( sourcePomFile, pomFile );
 
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-                                              targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+        File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+            .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
         assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
 
         File expectedMetadataFile = getTestFile( "src/test/expected-files/newversion-artifact-metadata.xml" );
@@ -872,14 +938,14 @@ public class RepositoryConverterTest
 
         ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
 
-        sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(),
-                                                             targetRepository.getLayout(), null, null );
+        sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(), targetRepository
+            .getLayout(), null, null );
 
         Artifact artifact = createArtifact( "test", "repository-artifact", "1.0" );
 
         try
         {
-            repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+            repositoryConverter.convert( artifact, targetRepository );
             fail( "Should have failed trying to convert within the same repository" );
         }
         catch ( RepositoryConversionException e )
@@ -905,8 +971,7 @@ public class RepositoryConverterTest
         return createArtifact( groupId, artifactId, baseVersion, version, "jar" );
     }
 
-    private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
-                                     String type )
+    private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version, String type )
     {
         Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
         artifact.setBaseVersion( baseVersion );
@@ -917,8 +982,8 @@ public class RepositoryConverterTest
 
     private Artifact createPomArtifact( Artifact artifact )
     {
-        return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(),
-                               artifact.getVersion(), "pom" );
+        return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(), artifact
+            .getVersion(), "pom" );
     }
 
     private static void compareFiles( File expectedPomFile, File pomFile )
@@ -926,8 +991,7 @@ public class RepositoryConverterTest
     {
         String expectedContent = normalizeString( FileUtils.readFileToString( expectedPomFile, null ) );
         String targetContent = normalizeString( FileUtils.readFileToString( pomFile, null ) );
-        assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent,
-                      targetContent );
+        assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent, targetContent );
     }
 
     private static String normalizeString( String path )
@@ -935,35 +999,79 @@ public class RepositoryConverterTest
         return path.trim().replaceAll( "\r\n", "\n" ).replace( '\r', '\n' ).replaceAll( "<\\?xml .+\\?>", "" );
     }
 
-    private void checkSuccess()
+    private void checkSuccess( MockConversionListener listener )
     {
-        assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        checkCounts( listener, 0, 0 );
     }
 
-    private void checkFailure()
+    private void checkFailure( MockConversionListener listener )
     {
-        assertEquals( "check num errors", 1, reportingDatabase.getNumFailures() );
-        assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        checkCounts( listener, 1, 0 );
+    }
+
+    private void checkCounts( MockConversionListener listener, int failures, int warnings )
+    {
+        int actualFailures = listener.getErrorMessageCount();
+        int actualWarnings = listener.getWarningMessageCount();
+
+        if ( ( failures != actualFailures ) || ( warnings != actualWarnings ) )
+        {
+            fail( "Check Results Counts expected:<" + failures + "," + warnings + "> but was:<" + actualFailures + ","
+                + actualWarnings + ">" );
+        }
     }
 
     private String getI18nString( String key )
     {
         return i18n.getString( repositoryConverter.getClass().getName(), Locale.getDefault(), key );
     }
-
-    private Result getFailure()
+    
+    private void assertHasWarningReason( MockConversionListener listener, String reason )
     {
-        ArtifactResults artifact = (ArtifactResults) reportingDatabase.getArtifactIterator().next();
-        return (Result) artifact.getFailures().get( 0 );
+        assertHasMessage( listener.getWarnings(), "warning", reason );
     }
 
-    private Result getWarning()
+    private void assertHasErrorReason( MockConversionListener listener, String reason )
     {
-        ArtifactResults artifact = (ArtifactResults) reportingDatabase.getArtifactIterator().next();
-        return (Result) artifact.getWarnings().get( 0 );
+        assertHasMessage( listener.getErrors(), "error", reason );
+    }
+    
+    private void assertHasMessage( Map map, String type, String message )
+    {
+        if ( ( map == null ) || ( map.isEmpty() ) )
+        {
+            fail( "No " + type + "s captured, expected " + type + " <" + message + ">" );
+        }
+
+        // Attempt to find the message ...
+        for ( Iterator it = map.values().iterator(); it.hasNext(); )
+        {
+            List msgList = (List) it.next();
+
+            if ( msgList.contains( message ) )
+            {
+                // Found it!
+                return;
+            }
+        }
+
+        // Didn't find it! whoops ...
+        for ( Iterator it = map.entrySet().iterator(); it.hasNext(); )
+        {
+            Map.Entry entry = (Entry) it.next();
+            String key = (String) entry.getKey();
+            List msgList = (List) entry.getValue();
+
+            System.err.println( " Artifact: " + key );
+
+            for ( Iterator itMsgs = msgList.iterator(); itMsgs.hasNext(); )
+            {
+                String msg = (String) itMsgs.next();
+                System.err.println( "           " + msg );
+            }
+        }
+
+        fail( "Unable to find " + type + " reason <" + message + "> in any artifact." );
     }
 
     private void createModernSourceRepository()
@@ -974,7 +1082,7 @@ public class RepositoryConverterTest
         ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
 
         File sourceBase = getTestFile( "src/test/source-modern-repository" );
-        sourceRepository =
-            factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
+        sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null,
+                                                             null );
     }
 }
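
Note on the test changes above: the assertions now go through a MockConversionListener rather than the old ReportingDatabase counters. That listener class is not included in this section of the commitdiff, so the following is only a hypothetical sketch of the surface the test relies on -- toKey(), the getErrors()/getWarnings() maps of per-artifact message lists, and the two message counters. The recordMessage callback name is invented; the real class would implement whatever ConversionListener interface addConversionListener() expects.

    // Hypothetical sketch only -- the real MockConversionListener is not shown in this diff.
    import org.apache.maven.artifact.Artifact;

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Map;

    public class MockConversionListener
    {
        private final Map errors = new HashMap();   // artifact key (String) -> List of reason Strings
        private final Map warnings = new HashMap(); // artifact key (String) -> List of reason Strings

        public String toKey( Artifact artifact )
        {
            return artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getVersion();
        }

        /* Invented callback name: however the converter reports an event, it is stored per artifact. */
        public void recordMessage( Artifact artifact, String reason, boolean isError )
        {
            Map map = isError ? errors : warnings;
            List list = (List) map.get( toKey( artifact ) );
            if ( list == null )
            {
                list = new ArrayList();
                map.put( toKey( artifact ), list );
            }
            list.add( reason );
        }

        public Map getErrors()
        {
            return errors;
        }

        public Map getWarnings()
        {
            return warnings;
        }

        public int getErrorMessageCount()
        {
            return countMessages( errors );
        }

        public int getWarningMessageCount()
        {
            return countMessages( warnings );
        }

        private int countMessages( Map map )
        {
            int count = 0;
            for ( Iterator it = map.values().iterator(); it.hasNext(); )
            {
                count += ( (List) it.next() ).size();
            }
            return count;
        }
    }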
diff --git a/archiva-converter/src/test/java/org/apache/maven/archiva/converter/transaction/AllTests.java b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/transaction/AllTests.java
new file mode 100644 (file)
index 0000000..9274035
--- /dev/null
@@ -0,0 +1,44 @@
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.converter.transaction" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( CreateFileEventTest.class );
+        suite.addTestSuite( CopyFileEventTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
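
The AllTests class added above is the plain JUnit 3 TestSuite aggregator used elsewhere in the build. As a minimal usage sketch, it can be driven directly with the JUnit text runner (the runner class name below is illustrative only):

    // Illustrative only: runs the transaction test suite with the JUnit 3 text runner.
    import junit.textui.TestRunner;

    public class RunTransactionTests
    {
        public static void main( String[] args )
        {
            TestRunner.run( org.apache.maven.archiva.converter.transaction.AllTests.suite() );
        }
    }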
diff --git a/archiva-converter/src/test/resources/log4j.properties b/archiva-converter/src/test/resources/log4j.properties
new file mode 100644 (file)
index 0000000..9b2c26a
--- /dev/null
@@ -0,0 +1,10 @@
+# Set root logger level to DEBUG and its only appender to A1.
+log4j.rootLogger=WARN, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+
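
The new test log4j.properties keeps the root logger at WARN and routes everything to a single console appender. In the ConversionPattern above, %-4r is the elapsed milliseconds, [%t] the thread, %-5p the level, %c the logger category, %x the (here empty) NDC, and %m%n the message. A minimal sketch of what that produces, with an illustrative class name and message:

    // Minimal sketch using log4j 1.x; output format follows the ConversionPattern above.
    import org.apache.log4j.Logger;

    public class LogPatternSketch
    {
        private static final Logger LOG = Logger.getLogger( LogPatternSketch.class );

        public static void main( String[] args )
        {
            LOG.warn( "repository scan skipped" );
            // Prints something like:
            // 12   [main] WARN  LogPatternSketch  - repository scan skipped
        }
    }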
index 1007fd83597406fa333f09b68dc366496ccc6d4b..b1cf9640a268e20bea30486bbfee34c091ccc579 100644 (file)
   ~ KIND, either express or implied.  See the License for the
   ~ specific language governing permissions and limitations
   ~ under the License.
-  -->
+-->
 
 <component-set>
   <components>
+
     <component>
       <role>org.apache.maven.archiva.converter.RepositoryConverter</role>
       <implementation>org.apache.maven.archiva.converter.DefaultRepositoryConverter</implementation>
           <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
           <field-name>artifactFactory</field-name>
         </requirement>
-        <requirement>
-          <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
-          <field-name>rewriter</field-name>
-        </requirement>
         <requirement>
           <role>org.codehaus.plexus.i18n.I18N</role>
           <field-name>i18n</field-name>
           <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
           <field-name>artifactFactory</field-name>
         </requirement>
-        <requirement>
-          <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
-          <field-name>rewriter</field-name>
-        </requirement>
         <requirement>
           <role>org.codehaus.plexus.i18n.I18N</role>
           <field-name>i18n</field-name>
         </requirement>
       </requirements>
     </component>
+
+
+    <component>
+      <role>org.codehaus.plexus.jdo.JdoFactory</role>
+      <role-hint>archiva</role-hint>
+      <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+      
+      <configuration>
+        <!-- Database Configuration -->
+        <driverName>org.hsqldb.jdbcDriver</driverName>
+        <url>jdbc:hsqldb:mem:TESTDB</url>
+        <userName>sa</userName>
+        <password></password>
+        
+        <!-- JPOX and JDO configuration -->
+        <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+        <otherProperties>
+          <property>
+            <name>javax.jdo.PersistenceManagerFactoryClass</name>
+            <value>org.jpox.PersistenceManagerFactoryImpl</value>
+          </property>
+          <property>
+            <name>org.jpox.autoCreateSchema</name>
+            <value>true</value>
+          </property>
+          <property>
+            <name>org.jpox.validateTables</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateConstraints</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateColumns</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.autoStartMechanism</name>
+            <value>None</value>
+          </property>
+          <property>
+            <name>org.jpox.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.poid.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.rdbms.dateTimezone</name>
+            <value>JDK_DEFAULT_TIMEZONE</value>
+          </property>
+        </otherProperties>
+      </configuration>
+    </component>
   </components>
 </component-set>
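
The JdoFactory component added to this descriptor gives the tests an in-memory HSQLDB database through JPOX, with schema auto-creation turned on and table/constraint/column validation switched off. A hedged sketch of how a component could use it follows; it assumes the plexus-jdo JdoFactory exposes the configured javax.jdo PersistenceManagerFactory, and the persisted record type is arbitrary:

    // Hedged sketch: assumes JdoFactory.getPersistenceManagerFactory() returns the
    // factory configured above (in-memory HSQLDB + JPOX, autoCreateSchema=true).
    import javax.jdo.PersistenceManager;
    import javax.jdo.PersistenceManagerFactory;
    import javax.jdo.Transaction;

    import org.codehaus.plexus.jdo.JdoFactory;

    public class JdoUsageSketch
    {
        public void store( JdoFactory jdoFactory, Object record )
        {
            PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
            PersistenceManager pm = pmf.getPersistenceManager();
            Transaction tx = pm.currentTransaction();
            try
            {
                tx.begin();
                pm.makePersistent( record ); // schema is created on demand by JPOX
                tx.commit();
            }
            finally
            {
                if ( tx.isActive() )
                {
                    tx.rollback();
                }
                pm.close();
            }
        }
    }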
index da35be2b9b9497cf380f661fc763defd51001eef..ed730ec37262a78b50fbb2943c77eeadfb38be6d 100644 (file)
       <groupId>org.apache.maven.archiva</groupId>
       <artifactId>archiva-configuration</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-converter</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
       <artifactId>archiva-discoverer</artifactId>
       <version>1.0-alpha-1</version>
       <scope>test</scope>
     </dependency>
+    <!-- TEST DEPS -->
+    <dependency>
+      <groupId>hsqldb</groupId>
+      <artifactId>hsqldb</artifactId>
+      <version>1.7.3.3</version>
+      <scope>test</scope>
+    </dependency>
     <!-- needed for PlexusTestCase -->
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
   <build>
     <plugins>
       <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>cobertura-maven-plugin</artifactId>
-        <!-- TODO! add unit tests -->
-        <configuration>
-          <instrumentation>
-            <excludes>
-              <exclude>**/**</exclude>
-            </excludes>
-          </instrumentation>
-        </configuration>
+        <groupId>org.codehaus.plexus</groupId>
+        <artifactId>plexus-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>merge</id>
+            <goals>
+              <goal>merge-descriptors</goal>
+            </goals>
+            <configuration>
+              <descriptors>
+                <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+                <descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
+              </descriptors>
+            </configuration>
+          </execution>
+        </executions>
       </plugin>
     </plugins>
   </build>
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifact.java b/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifact.java
deleted file mode 100644 (file)
index 940b6bd..0000000
+++ /dev/null
@@ -1,76 +0,0 @@
-package org.apache.maven.archiva.artifact;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * ManagedArtifact 
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ManagedArtifact
-{
-    private String repositoryId;
-
-    private Artifact artifact;
-
-    private String path;
-
-    protected Map attached;
-
-    public ManagedArtifact( String repoId, Artifact artifact, String path )
-    {
-        super();
-        this.repositoryId = repoId;
-        this.artifact = artifact;
-        this.path = path;
-        this.attached = new HashMap();
-    }
-
-    public Artifact getArtifact()
-    {
-        return artifact;
-    }
-
-    public String getPath()
-    {
-        return path;
-    }
-
-    public String getRepositoryId()
-    {
-        return repositoryId;
-    }
-
-    public Map getAttached()
-    {
-        return attached;
-    }
-
-    public void setAttached( Map attached )
-    {
-        this.attached = attached;
-    }
-}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifactTypes.java b/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifactTypes.java
deleted file mode 100644 (file)
index 6cccfcd..0000000
+++ /dev/null
@@ -1,81 +0,0 @@
-package org.apache.maven.archiva.artifact;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * ManagedArtifactTypes - provides place to test an unknown artifact type.
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ManagedArtifactTypes
-{
-    public static final int GENERIC = 0;
-
-    public static final int JAVA = 1;
-
-    public static final int EJB = 2;
-
-    private static List javaArtifacts;
-
-    private static List ejbArtifacts;
-
-    static
-    {
-        javaArtifacts = new ArrayList();
-        javaArtifacts.add( "jar" );
-        javaArtifacts.add( "war" );
-        javaArtifacts.add( "sar" );
-        javaArtifacts.add( "rar" );
-        javaArtifacts.add( "ear" );
-
-        ejbArtifacts = new ArrayList();
-        ejbArtifacts.add( "ejb" );
-        ejbArtifacts.add( "ejb-client" );
-    }
-
-    public static int whichType( String type )
-    {
-        if ( StringUtils.isBlank( type ) )
-        {
-            // TODO: is an empty type even possible?
-            return GENERIC;
-        }
-
-        type = type.toLowerCase();
-
-        if ( ejbArtifacts.contains( type ) )
-        {
-            return EJB;
-        }
-
-        if ( javaArtifacts.contains( type ) )
-        {
-            return JAVA;
-        }
-
-        return GENERIC;
-    }
-}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedEjbArtifact.java b/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedEjbArtifact.java
deleted file mode 100644 (file)
index eacf1cb..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-package org.apache.maven.archiva.artifact;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-/**
- * ManagedEjbArtifact - adds the ability to reference the ejb-client jar too. 
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ManagedEjbArtifact
-    extends ManagedJavaArtifact
-{
-    public static final String CLIENT = "client";
-
-    public ManagedEjbArtifact( String repoId, Artifact artifact, String path )
-    {
-        super( repoId, artifact, path );
-    }
-
-    public String getClientPath()
-    {
-        return (String) super.attached.get( CLIENT );
-    }
-
-    public void setClientPath( String clientPath )
-    {
-        super.attached.put( CLIENT, clientPath );
-    }
-}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedJavaArtifact.java b/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedJavaArtifact.java
deleted file mode 100644 (file)
index babb884..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-package org.apache.maven.archiva.artifact;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-/**
- * ManagedJavaArtifact - a ManagedArtifact with optional javadoc and source 
- * reference jars.
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ManagedJavaArtifact
-    extends ManagedArtifact
-{
-    public static final String JAVADOC = "javadoc";
-
-    public static final String SOURCES = "sources";
-
-    public ManagedJavaArtifact( String repoId, Artifact artifact, String path )
-    {
-        super( repoId, artifact, path );
-    }
-
-    public String getJavadocPath()
-    {
-        return (String) super.attached.get( JAVADOC );
-    }
-
-    public void setJavadocPath( String javadocPath )
-    {
-        super.attached.put( JAVADOC, javadocPath );
-    }
-
-    public String getSourcesPath()
-    {
-        return (String) super.attached.get( SOURCES );
-    }
-
-    public void setSourcesPath( String sourcesPath )
-    {
-        super.attached.put( SOURCES, sourcesPath );
-    }
-}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java
new file mode 100644 (file)
index 0000000..7b6d15f
--- /dev/null
@@ -0,0 +1,97 @@
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.InvalidArtifactRTException;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
+
+import java.util.Collections;
+
+/**
+ * ArtifactHealthConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer"
+ *     role-hint="artifact-health"
+ *     instantiation-strategy="per-lookup"
+ */
+public class ArtifactHealthConsumer
+    extends GenericArtifactConsumer
+{
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
+    /**
+     * @plexus.requirement role-hint="health"
+     */
+    private ReportGroup health;
+
+    /**
+     * @plexus.requirement
+     */
+    private MavenProjectBuilder projectBuilder;
+
+    public void processArtifact( Artifact artifact, BaseFile file )
+    {
+        Model model = null;
+        try
+        {
+            Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact
+                .getArtifactId(), artifact.getVersion() );
+            MavenProject project = projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
+
+            model = project.getModel();
+        }
+        catch ( InvalidArtifactRTException e )
+        {
+            database.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
+        }
+        catch ( ProjectBuildingException e )
+        {
+            database.addWarning( artifact, null, null, "Error reading project model: " + e );
+        }
+        
+        database.remove( artifact );
+        health.processArtifact( artifact, model );
+    }
+
+    public void processFileProblem( BaseFile path, String message )
+    {
+        /* do nothing here (yet) */
+        // TODO: store build failure into database?
+    }
+    
+    public String getName()
+    {
+        return "Artifact Health Consumer";
+    }
+}
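
ArtifactHealthConsumer is declared as a per-lookup Plexus component under the generic Consumer role, so each repository scan can obtain a fresh instance by role and hint. A hypothetical lookup sketch from a PlexusTestCase follows; the test class name is invented, and only the role/hint strings and the getName() value come from the code above:

    // Hypothetical test sketch: resolves the consumer through the Plexus container.
    import org.apache.maven.archiva.consumers.ArtifactHealthConsumer;
    import org.codehaus.plexus.PlexusTestCase;

    public class ArtifactHealthConsumerLookupTest
        extends PlexusTestCase
    {
        public void testLookup()
            throws Exception
        {
            ArtifactHealthConsumer consumer = (ArtifactHealthConsumer) lookup(
                "org.apache.maven.archiva.common.consumers.Consumer", "artifact-health" );

            assertNotNull( consumer );
            assertEquals( "Artifact Health Consumer", consumer.getName() );
        }
    }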
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java
new file mode 100644 (file)
index 0000000..2d7026b
--- /dev/null
@@ -0,0 +1,99 @@
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+
+/**
+ * IndexArtifactConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer"
+ *     role-hint="index-artifact"
+ *     instantiation-strategy="per-lookup"
+ */
+public class IndexArtifactConsumer
+    extends GenericArtifactConsumer
+{
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryArtifactIndexFactory indexFactory;
+    
+    /**
+     * @plexus.requirement role-hint="standard"
+     */
+    private RepositoryIndexRecordFactory recordFactory;
+
+    /**
+     * Configuration store.
+     *
+     * @plexus.requirement
+     */
+    private ArchivaConfiguration archivaConfiguration;
+
+    private RepositoryArtifactIndex index;
+
+    public boolean init( ArtifactRepository repository )
+    {
+        Configuration configuration = archivaConfiguration.getConfiguration();
+
+        File indexPath = new File( configuration.getIndexPath() );
+
+        index = indexFactory.createStandardIndex( indexPath );
+
+        return super.init( repository );
+    }
+
+    public void processArtifact( Artifact artifact, BaseFile file )
+    {
+        try
+        {
+            index.indexArtifact( artifact, recordFactory );
+        }
+        catch ( RepositoryIndexException e )
+        {
+            getLogger().warn( "Unable to index artifact " + artifact, e );
+        }
+    }
+
+    public void processFileProblem( BaseFile path, String message )
+    {
+
+    }
+    
+    public String getName()
+    {
+        return "Index Artifact Consumer";
+    }
+}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java
new file mode 100644 (file)
index 0000000..d858d32
--- /dev/null
@@ -0,0 +1,69 @@
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.GenericRepositoryMetadataConsumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+/**
+ * RepositoryMetadataHealthConsumer 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer"
+ *     role-hint="metadata-health"
+ *     instantiation-strategy="per-lookup"
+ */
+public class RepositoryMetadataHealthConsumer
+    extends GenericRepositoryMetadataConsumer
+{
+    /**
+     * @plexus.requirement
+     */
+    private MetadataResultsDatabase database;
+
+    /**
+     * @plexus.requirement role-hint="health"
+     */
+    private ReportGroup health;
+
+    public void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file )
+    {
+        MetadataResults results = database.getMetadataResults( metadata );
+        database.clearResults( results );
+
+        health.processMetadata( metadata, repository );
+    }
+    
+    public void processFileProblem( BaseFile path, String message )
+    {
+        
+    }
+    
+    public String getName()
+    {
+        return "RepositoryMetadata Health Consumer";
+    }
+}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java b/archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java
deleted file mode 100644 (file)
index 7beb18a..0000000
+++ /dev/null
@@ -1,126 +0,0 @@
-package org.apache.maven.archiva.conversion;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.converter.RepositoryConverter;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStore;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- * @plexus.component
- * @todo turn this into a general conversion component and hide all this crap here.
- * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
- */
-public class DefaultLegacyRepositoryConverter
-    implements LegacyRepositoryConverter
-{
-    /**
-     * @plexus.requirement role-hint="legacy"
-     */
-    private ArtifactDiscoverer artifactDiscoverer;
-
-    /**
-     * @plexus.requirement role-hint="legacy"
-     */
-    private ArtifactRepositoryLayout legacyLayout;
-
-    /**
-     * @plexus.requirement role-hint="default"
-     */
-    private ArtifactRepositoryLayout defaultLayout;
-
-    /**
-     * @plexus.requirement
-     */
-    private ArtifactRepositoryFactory artifactRepositoryFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private RepositoryConverter repositoryConverter;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportingStore reportingStore;
-
-    /**
-     * @plexus.requirement role-hint="health"
-     */
-    private ReportGroup reportGroup;
-
-    public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
-                                         List blacklistedPatterns, boolean includeSnapshots )
-        throws RepositoryConversionException, DiscovererException
-    {
-        ArtifactRepository legacyRepository;
-
-        ArtifactRepository repository;
-
-        try
-        {
-            legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy",
-                                                                                   legacyRepositoryDirectory.toURI().toURL().toString(),
-                                                                                   legacyLayout, null, null );
-
-            repository = artifactRepositoryFactory.createArtifactRepository( "default",
-                                                                             repositoryDirectory.toURI().toURL().toString(),
-                                                                             defaultLayout, null, null );
-        }
-        catch ( MalformedURLException e )
-        {
-            throw new RepositoryConversionException( "Error convering legacy repository.", e );
-        }
-
-        ArtifactFilter filter =
-            includeSnapshots ? new AcceptAllArtifactFilter() : (ArtifactFilter) new SnapshotArtifactFilter();
-        List legacyArtifacts = artifactDiscoverer.discoverArtifacts( legacyRepository, blacklistedPatterns, filter );
-
-        ReportingDatabase reporter;
-        try
-        {
-            reporter = reportingStore.getReportsFromStore( repository, reportGroup );
-
-            repositoryConverter.convert( legacyArtifacts, repository, reporter );
-
-            reportingStore.storeReports( reporter, repository );
-        }
-        catch ( ReportingStoreException e )
-        {
-            throw new RepositoryConversionException( "Error convering legacy repository.", e );
-        }
-    }
-}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java b/archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java
deleted file mode 100644 (file)
index 8764214..0000000
+++ /dev/null
@@ -1,47 +0,0 @@
-package org.apache.maven.archiva.conversion;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- */
-public interface LegacyRepositoryConverter
-{
-    String ROLE = LegacyRepositoryConverter.class.getName();
-
-    /**
-     * Convert a legacy repository to a modern repository. This means a Maven 1.x repository
-     * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs.
-     *
-     * @param legacyRepositoryDirectory
-     * @param repositoryDirectory
-     * @throws org.apache.maven.archiva.converter.RepositoryConversionException
-     *
-     */
-    void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, List blacklistedPatterns,
-                                  boolean includeSnapshots )
-        throws RepositoryConversionException, DiscovererException;
-}
index 554fb34914afb35d07c12550a8551666c50165b0..0bac31018bf64025eab5063e49a9f0b79101c682 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.maven.archiva.repositories;
  * under the License.
  */
 
-import org.apache.maven.archiva.artifact.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
 import org.apache.maven.archiva.configuration.RepositoryConfiguration;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -31,7 +31,7 @@ import java.util.List;
 /**
  * ActiveManagedRepositories
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public interface ActiveManagedRepositories
@@ -44,12 +44,27 @@ public interface ActiveManagedRepositories
      * @param id the ID of the repository.
      * @return the ArtifactRepository associated with the provided ID, or null if none found.
      */
-    ArtifactRepository getArtifactRepository( String id );
+    public ArtifactRepository getArtifactRepository( String id );
 
-    List getAllArtifactRepositories();
+    /**
+     * Get the List of active managed repositories as a List of {@link ArtifactRepository} objects.
+     *  
+     * @return the list of ArtifactRepository objects.
+     */
+    public List /*<ArtifactRepository>*/ getAllArtifactRepositories();
 
     RepositoryConfiguration getRepositoryConfiguration( String id );
 
+    /**
+     * Providing only a groupId, artifactId, and version, return the MavenProject that
+     * is found, in any managed repository.
+     * 
+     * @param groupId the groupId to search for
+     * @param artifactId the artifactId to search for
+     * @param version the version to search for
+     * @return the MavenProject from the provided parameters.
+     * @throws ProjectBuildingException if there was a problem building the maven project object.
+     */
     MavenProject findProject( String groupId, String artifactId, String version )
         throws ProjectBuildingException;
 
@@ -59,4 +74,20 @@ public interface ActiveManagedRepositories
     ManagedArtifact findArtifact( String groupId, String artifactId, String version, String type );
 
     ManagedArtifact findArtifact( Artifact artifact );
+
+    /**
+     * Obtain the last data refresh timestamp for all Managed Repositories.
+     * 
+     * @return the last data refresh timestamp.
+     */
+    long getLastDataRefreshTime();
+
+    /**
+     * Tests to see if there needs to be a data refresh performed.
+     * 
+     * The only scenario that triggers a refresh is when one or more repositories have never had their data refreshed.
+     * 
+     * @return true if there needs to be a data refresh.
+     */
+    boolean needsDataRefresh();
 }
index 128a91d66e2f1abc940c23da0146e7c531c1b30a..6b25047832a8810609fe65d228980f2eba59282a 100644 (file)
@@ -20,14 +20,15 @@ package org.apache.maven.archiva.repositories;
  */
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.artifact.ManagedArtifact;
-import org.apache.maven.archiva.artifact.ManagedArtifactTypes;
-import org.apache.maven.archiva.artifact.ManagedEjbArtifact;
-import org.apache.maven.archiva.artifact.ManagedJavaArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifactTypes;
+import org.apache.maven.archiva.common.artifact.managed.ManagedEjbArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedJavaArtifact;
 import org.apache.maven.archiva.configuration.ArchivaConfiguration;
 import org.apache.maven.archiva.configuration.Configuration;
 import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
 import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -49,7 +50,7 @@ import java.util.List;
 /**
  * DefaultActiveManagedRepositories
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  * @plexus.component role="org.apache.maven.archiva.repositories.ActiveManagedRepositories"
  */
@@ -224,6 +225,7 @@ public class DefaultActiveManagedRepositories
 
         repositories = repositoryFactory.createRepositories( this.configuration );
         localRepository = repositoryFactory.createLocalRepository( this.configuration );
+
     }
 
     private ManagedArtifact createManagedArtifact( ArtifactRepository repository, Artifact artifact, File f )
@@ -283,8 +285,9 @@ public class DefaultActiveManagedRepositories
     {
         if ( propertyName.startsWith( "repositories" ) || propertyName.startsWith( "localRepository" ) )
         {
-            getLogger().debug( "Triggering managed repository configuration change with " + propertyName + " set to " +
-                propertyValue );
+            getLogger().debug(
+                               "Triggering managed repository configuration change with " + propertyName + " set to "
+                                   + propertyValue );
             configureSelf( archivaConfiguration.getConfiguration() );
         }
         else
@@ -292,4 +295,39 @@ public class DefaultActiveManagedRepositories
             getLogger().debug( "Not triggering managed repository configuration change with " + propertyName );
         }
     }
+
+    public long getLastDataRefreshTime()
+    {
+        long lastDataRefreshTime = 0;
+
+        for ( Iterator i = getAllArtifactRepositories().iterator(); i.hasNext(); )
+        {
+            ArtifactRepository repository = (ArtifactRepository) i.next();
+
+            DiscovererStatistics stats = new DiscovererStatistics( repository );
+            if ( stats.getTimestampFinished() > lastDataRefreshTime )
+            {
+                lastDataRefreshTime = stats.getTimestampFinished();
+            }
+        }
+
+        return lastDataRefreshTime;
+    }
+
+    public boolean needsDataRefresh()
+    {
+        for ( Iterator i = getAllArtifactRepositories().iterator(); i.hasNext(); )
+        {
+            ArtifactRepository repository = (ArtifactRepository) i.next();
+
+            DiscovererStatistics stats = new DiscovererStatistics( repository );
+            if ( stats.getTimestampFinished() <= 0 )
+            {
+                // Found a repository that has NEVER had its data walked.
+                return true;
+            }
+        }
+
+        return false;
+    }
 }
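
The two methods added above read the per-repository DiscovererStatistics: getLastDataRefreshTime() reports the newest completed walk, while needsDataRefresh() stays true only while some repository has never been walked at all. A hypothetical caller sketch (class name invented; only the ActiveManagedRepositories calls come from the interface and implementation shown here):

    // Hypothetical caller: decides whether an immediate data-refresh should be queued.
    import org.apache.maven.archiva.repositories.ActiveManagedRepositories;

    public class DataRefreshGuard
    {
        private final ActiveManagedRepositories activeRepositories;

        public DataRefreshGuard( ActiveManagedRepositories activeRepositories )
        {
            this.activeRepositories = activeRepositories;
        }

        public boolean shouldQueueNow()
        {
            // True only while at least one repository has never completed a walk.
            return activeRepositories.needsDataRefresh();
        }

        public long lastCompletedWalk()
        {
            return activeRepositories.getLastDataRefreshTime();
        }
    }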
index 04ab088d4daae4c947d02e4e642563adb4022d5f..bae3517506445a82728e2f63d0a01eaea8a69fd7 100644 (file)
@@ -21,11 +21,8 @@ package org.apache.maven.archiva.scheduler;
 
 import org.apache.maven.archiva.configuration.ArchivaConfiguration;
 import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
 import org.apache.maven.archiva.scheduler.task.RepositoryTask;
 import org.codehaus.plexus.logging.AbstractLogEnabled;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
@@ -42,7 +39,6 @@ import org.quartz.JobDataMap;
 import org.quartz.JobDetail;
 import org.quartz.SchedulerException;
 
-import java.io.File;
 import java.text.ParseException;
 
 /**
@@ -61,28 +57,23 @@ public class DefaultRepositoryTaskScheduler
     private Scheduler scheduler;
 
     /**
-     * @plexus.requirement role-hint="indexer"
+     * @plexus.requirement role-hint="data-refresh"
      */
-    private TaskQueue indexerQueue;
-
-    /**
-     * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
-     */
-    private IndexerTaskExecutor indexerTaskExecutor;
+    private TaskQueue datarefreshQueue;
 
     /**
      * @plexus.requirement
      */
     private ArchivaConfiguration archivaConfiguration;
-
+    
     /**
      * @plexus.requirement
      */
-    private RepositoryArtifactIndexFactory indexFactory;
+    private ActiveManagedRepositories activeRepositories;
 
     private static final String DISCOVERER_GROUP = "DISCOVERER";
 
-    private static final String INDEXER_JOB = "indexerTask";
+    private static final String DATA_REFRESH_JOB = "dataRefreshTask";
 
     public void start()
         throws StartingException
@@ -92,11 +83,11 @@ public class DefaultRepositoryTaskScheduler
 
         try
         {
-            scheduleJobs( configuration.getIndexPath(), configuration.getIndexerCronExpression() );
+            scheduleJobs( configuration.getDataRefreshCronExpression() );
         }
         catch ( ParseException e )
         {
-            throw new StartingException( "Invalid configuration: " + configuration.getIndexerCronExpression(), e );
+            throw new StartingException( "Invalid configuration: " + configuration.getDataRefreshCronExpression(), e );
         }
         catch ( SchedulerException e )
         {
@@ -104,29 +95,22 @@ public class DefaultRepositoryTaskScheduler
         }
     }
 
-    private void scheduleJobs( String indexPath, String indexerCronExpression )
+    private void scheduleJobs( String cronExpression )
         throws ParseException, SchedulerException
     {
-        if ( indexPath != null )
-        {
-            JobDetail jobDetail = createJobDetail( INDEXER_JOB );
+        JobDetail jobDetail = createJobDetail( DATA_REFRESH_JOB );
 
-            getLogger().info( "Scheduling indexer: " + indexerCronExpression );
-            CronTrigger trigger = new CronTrigger( INDEXER_JOB + "Trigger", DISCOVERER_GROUP, indexerCronExpression );
-            scheduler.scheduleJob( jobDetail, trigger );
+        getLogger().info( "Scheduling data-refresh: " + cronExpression );
+        CronTrigger trigger = new CronTrigger( DATA_REFRESH_JOB + "Trigger", DISCOVERER_GROUP, cronExpression );
+        scheduler.scheduleJob( jobDetail, trigger );
 
-            try
-            {
-                queueNowIfNeeded();
-            }
-            catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e )
-            {
-                getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
-            }
+        try
+        {
+            queueNowIfNeeded();
         }
-        else
+        catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e )
         {
-            getLogger().info( "Not scheduling indexer - index path is not configured" );
+            getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
         }
     }
 
@@ -135,7 +119,7 @@ public class DefaultRepositoryTaskScheduler
         JobDetail jobDetail = new JobDetail( jobName, DISCOVERER_GROUP, RepositoryTaskJob.class );
 
         JobDataMap dataMap = new JobDataMap();
-        dataMap.put( RepositoryTaskJob.TASK_QUEUE, indexerQueue );
+        dataMap.put( RepositoryTaskJob.TASK_QUEUE, datarefreshQueue );
         dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, RepositoryTask.QUEUE_POLICY_SKIP );
         jobDetail.setJobDataMap( dataMap );
 
@@ -147,7 +131,7 @@ public class DefaultRepositoryTaskScheduler
     {
         try
         {
-            scheduler.unscheduleJob( INDEXER_JOB, DISCOVERER_GROUP );
+            scheduler.unscheduleJob( DATA_REFRESH_JOB, DISCOVERER_GROUP );
         }
         catch ( SchedulerException e )
         {
@@ -163,7 +147,7 @@ public class DefaultRepositoryTaskScheduler
 
     public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
     {
-        if ( "indexPath".equals( propertyName ) || "indexerCronExpression".equals( propertyName ) )
+        if ( "dataRefreshCronExpression".equals( propertyName ) )
         {
             getLogger().debug( "Restarting task scheduler with new configuration after property change: " +
                 propertyName + " to " + propertyValue );
@@ -179,7 +163,7 @@ public class DefaultRepositoryTaskScheduler
             try
             {
                 Configuration configuration = archivaConfiguration.getConfiguration();
-                scheduleJobs( configuration.getIndexPath(), configuration.getIndexerCronExpression() );
+                scheduleJobs( configuration.getDataRefreshCronExpression() );
             }
             catch ( ParseException e )
             {
@@ -199,43 +183,27 @@ public class DefaultRepositoryTaskScheduler
         }
     }
 
-    public void runIndexer()
-        throws org.apache.maven.archiva.scheduler.TaskExecutionException
+    public void runDataRefresh()
+        throws TaskExecutionException
     {
-        IndexerTask task = new IndexerTask();
-        task.setJobName( "INDEX_INIT" );
+        DataRefreshTask task = new DataRefreshTask();
+        task.setJobName( "DATA_REFRESH_INIT" );
         try
         {
-            indexerQueue.put( task );
+            datarefreshQueue.put( task );
         }
         catch ( TaskQueueException e )
         {
-            throw new org.apache.maven.archiva.scheduler.TaskExecutionException( e.getMessage(), e );
+            throw new TaskExecutionException( e.getMessage(), e );
         }
     }
 
     public void queueNowIfNeeded()
-        throws org.codehaus.plexus.taskqueue.execution.TaskExecutionException
+        throws TaskExecutionException
     {
-        Configuration configuration = archivaConfiguration.getConfiguration();
-
-        File indexPath = new File( configuration.getIndexPath() );
-
-        try
+        if ( activeRepositories.needsDataRefresh() )
         {
-            RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
-            if ( !artifactIndex.exists() )
-            {
-                runIndexer();
-            }
-        }
-        catch ( RepositoryIndexException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-        catch ( org.apache.maven.archiva.scheduler.TaskExecutionException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
+            runDataRefresh();
         }
     }
 
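
Scheduling is now driven solely by dataRefreshCronExpression, which uses the Quartz cron format (seconds, minutes, hours, day-of-month, month, day-of-week). A minimal sketch of the trigger construction follows; the expression "0 0 0 * * ?" (fire daily at midnight) and the trigger name are illustrative values, not the shipped defaults:

import org.quartz.CronTrigger;

import java.text.ParseException;

public class DataRefreshTriggerSketch
{
    public static CronTrigger buildTrigger( String dataRefreshCronExpression )
        throws ParseException
    {
        // An invalid expression surfaces as a ParseException, which the scheduler
        // above wraps in a StartingException when it starts up.
        return new CronTrigger( "dataRefreshTaskTrigger", "DISCOVERER", dataRefreshCronExpression );
    }

    public static void main( String[] args )
        throws ParseException
    {
        // Illustrative value only: run the data refresh once a day at midnight.
        buildTrigger( "0 0 0 * * ?" );
    }
}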
index dc3c26ace1344c7d43eb6480c1e250578bb3d1cd..06152f2446bfbf323e83d95732fa4e31e4b7323f 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.maven.archiva.scheduler;
  * under the License.
  */
 
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
 import org.apache.maven.archiva.scheduler.task.RepositoryTask;
 import org.codehaus.plexus.scheduler.AbstractJob;
 import org.codehaus.plexus.taskqueue.TaskQueue;
@@ -53,27 +53,27 @@ public class RepositoryTaskJob
         JobDataMap dataMap = context.getJobDetail().getJobDataMap();
         setJobDataMap( dataMap );
 
-        TaskQueue indexerQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
+        TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
         String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
 
-        RepositoryTask task = new IndexerTask();
+        RepositoryTask task = new DataRefreshTask();
         task.setJobName( context.getJobDetail().getName() );
 
         try
         {
-            if ( indexerQueue.getQueueSnapshot().size() == 0 )
+            if ( taskQueue.getQueueSnapshot().size() == 0 )
             {
-                indexerQueue.put( task );
+                taskQueue.put( task );
             }
             else
             {
                 if ( RepositoryTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) )
                 {
-                    indexerQueue.put( task );
+                    taskQueue.put( task );
                 }
                 else if ( RepositoryTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) )
                 {
-                    //do not queue anymore, policy is to skip
+                    // do not queue anymore, policy is to skip
                 }
             }
         }
index c63556ff5f57af183f87ab109c7fa5faffbfabd4..08e511f3ef887fe53bb1dbde9840406721d7001d 100644 (file)
@@ -19,6 +19,8 @@ package org.apache.maven.archiva.scheduler;
  * under the License.
  */
 
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+
 /**
  * The component that takes care of scheduling in the application.
  *
@@ -31,7 +33,7 @@ public interface RepositoryTaskScheduler
      */
     String ROLE = RepositoryTaskScheduler.class.getName();
 
-    void runIndexer()
+    void runDataRefresh()
         throws TaskExecutionException;
 
-}
+} 
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/TaskExecutionException.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/TaskExecutionException.java
deleted file mode 100644 (file)
index 5164344..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-package org.apache.maven.archiva.scheduler;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Exception occurring during task execution.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class TaskExecutionException
-    extends Exception
-{
-    public TaskExecutionException( String message, Throwable t )
-    {
-        super( message, t );
-    }
-}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshConsumers.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshConsumers.java
new file mode 100644 (file)
index 0000000..783a44c
--- /dev/null
@@ -0,0 +1,61 @@
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Mutable list of consumers for the Data Refresh.
+ * 
+ * NOTE: This class only exists to minimize the requirements of manual component management.
+ *       This approach allows for a small and simple component definition in the application.xml
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers"
+ */
+public class DataRefreshConsumers
+{
+    /**
+     * @plexus.configuration
+     */
+    private List consumerNames;
+
+    public List getConsumerNames()
+    {
+        if ( consumerNames == null )
+        {
+            consumerNames = new ArrayList();
+            consumerNames.add( "index-artifact" );
+            consumerNames.add( "artifact-health" );
+            consumerNames.add( "metadata-health" );
+        }
+
+        return consumerNames;
+    }
+
+    public Iterator iterator()
+    {
+        return getConsumerNames().iterator();
+    }
+}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java
new file mode 100644 (file)
index 0000000..9392772
--- /dev/null
@@ -0,0 +1,192 @@
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.Consumer;
+import org.apache.maven.archiva.common.consumers.ConsumerException;
+import org.apache.maven.archiva.common.consumers.ConsumerFactory;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.taskqueue.Task;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * DataRefreshExecutor 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" 
+ *      role-hint="data-refresh"
+ */
+public class DataRefreshExecutor
+    extends AbstractLogEnabled
+    implements TaskExecutor
+{
+    /**
+     * Configuration store.
+     *
+     * @plexus.requirement
+     */
+    private ArchivaConfiguration archivaConfiguration;
+
+    /**
+     * @plexus.requirement
+     */
+    private ConfiguredRepositoryFactory repoFactory;
+
+    /**
+     * @plexus.requirement
+     */
+    private DataRefreshConsumers consumerNames;
+
+    /**
+     * @plexus.requirement
+     */
+    private Discoverer discoverer;
+
+    /**
+     * @plexus.requirement
+     */
+    private ConsumerFactory consumerFactory;
+
+    public void executeTask( Task task )
+        throws TaskExecutionException
+    {
+        DataRefreshTask dataRefreshTask = (DataRefreshTask) task;
+
+        getLogger().info( "Executing task from queue with job name: " + dataRefreshTask.getJobName() );
+
+        execute();
+    }
+
+    public void execute()
+        throws TaskExecutionException
+    {
+        Configuration configuration = archivaConfiguration.getConfiguration();
+
+        List consumers = new ArrayList();
+
+        for ( Iterator it = consumerNames.iterator(); it.hasNext(); )
+        {
+            String name = (String) it.next();
+            try
+            {
+                Consumer consumer = consumerFactory.createConsumer( name );
+                consumers.add( consumer );
+            }
+            catch ( ConsumerException e )
+            {
+                getLogger().warn( e.getMessage(), e );
+                throw new TaskExecutionException( e.getMessage(), e );
+            }
+        }
+
+        long time = System.currentTimeMillis();
+
+        for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
+        {
+            RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
+
+            if ( !repositoryConfiguration.isIndexed() )
+            {
+                continue;
+            }
+
+            ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
+
+            List filteredConsumers = filterConsumers( consumers, repository );
+
+            DiscovererStatistics lastRunStats = new DiscovererStatistics( repository );
+            try
+            {
+                lastRunStats.load( ".datarefresh" );
+            }
+            catch ( IOException e )
+            {
+                getLogger().info(
+                                  "Unable to load last run statistics for repository [" + repository.getId() + "]: "
+                                      + e.getMessage() );
+            }
+
+            try
+            {
+                DiscovererStatistics stats = discoverer
+                    .walkRepository( repository, filteredConsumers, repositoryConfiguration.isIncludeSnapshots(),
+                                     lastRunStats.getTimestampFinished(), null, null );
+
+                stats.dump( getLogger() );
+            }
+            catch ( DiscovererException e )
+            {
+                getLogger().error(
+                                   "Unable to run data refresh against repository [" + repository.getId() + "]: "
+                                       + e.getMessage(), e );
+            }
+        }
+
+        time = System.currentTimeMillis() - time;
+
+        getLogger().info( "Finished data refresh process in " + time + "ms." );
+    }
+
+    /**
+     * Not all consumers work with all repositories.
+     * This will filter out those incompatible consumers based on the provided repository.
+     * 
+     * @param consumers the initial list of consumers.
+     * @param repository the repository to test consumer against.
+     * @return the filtered list of consumers.
+     */
+    private List filterConsumers( List consumers, ArtifactRepository repository )
+    {
+        List filtered = new ArrayList();
+
+        for ( Iterator it = consumers.iterator(); it.hasNext(); )
+        {
+            Consumer consumer = (Consumer) it.next();
+            if ( consumer.init( repository ) )
+            {
+                // Approved!
+                filtered.add( consumer );
+            }
+            else
+            {
+                getLogger().info( "Disabling consumer [" + consumer.getName() + "] for repository " + repository );
+            }
+        }
+
+        return filtered;
+    }
+}
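
filterConsumers() keeps a consumer only if its init( repository ) call accepts the given repository. The standalone sketch below illustrates that contract; LayoutAwareConsumer is a simplified, hypothetical stand-in for org.apache.maven.archiva.common.consumers.Consumer, which declares additional methods:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class ConsumerFilterSketch
{
    /** Simplified, hypothetical stand-in for the real Consumer interface. */
    interface LayoutAwareConsumer
    {
        String getName();

        boolean init( String repositoryLayout );
    }

    /** Keep only the consumers that accept the given repository layout. */
    static List filterConsumers( List consumers, String repositoryLayout )
    {
        List filtered = new ArrayList();

        for ( Iterator it = consumers.iterator(); it.hasNext(); )
        {
            LayoutAwareConsumer consumer = (LayoutAwareConsumer) it.next();
            if ( consumer.init( repositoryLayout ) )
            {
                filtered.add( consumer );
            }
        }

        return filtered;
    }
}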
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java
deleted file mode 100644 (file)
index 25d3905..0000000
+++ /dev/null
@@ -1,317 +0,0 @@
-package org.apache.maven.archiva.scheduler.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.archiva.configuration.RepositoryConfiguration;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.executor.ReportExecutor;
-import org.apache.maven.archiva.reporting.filter.ReportingMetadataFilter;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.taskqueue.Task;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Edwin Punzalan
- * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
- */
-public class IndexerTaskExecutor
-    extends AbstractLogEnabled
-    implements TaskExecutor
-{
-    /**
-     * Configuration store.
-     *
-     * @plexus.requirement
-     */
-    private ArchivaConfiguration archivaConfiguration;
-
-    /**
-     * @plexus.requirement
-     */
-    private RepositoryArtifactIndexFactory indexFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private ConfiguredRepositoryFactory repoFactory;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
-     */
-    private Map artifactDiscoverers;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
-     */
-    private Map metadataDiscoverers;
-
-    /**
-     * @plexus.requirement role-hint="standard"
-     */
-    private RepositoryIndexRecordFactory recordFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportExecutor reportExecutor;
-
-    /**
-     * @plexus.requirement role-hint="health"
-     */
-    private ReportGroup reportGroup;
-
-    private long lastIndexingTime = 0;
-
-    private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
-    public long getLastIndexingTime()
-    {
-        return lastIndexingTime;
-    }
-
-    public void executeTask( Task task )
-        throws TaskExecutionException
-    {
-        IndexerTask indexerTask = (IndexerTask) task;
-
-        getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
-
-        execute();
-    }
-
-    public void execute()
-        throws TaskExecutionException
-    {
-        Configuration configuration = archivaConfiguration.getConfiguration();
-
-        File indexPath = new File( configuration.getIndexPath() );
-
-        execute( configuration, indexPath );
-    }
-
-    public void executeNowIfNeeded()
-        throws TaskExecutionException
-    {
-        Configuration configuration = archivaConfiguration.getConfiguration();
-
-        File indexPath = new File( configuration.getIndexPath() );
-
-        try
-        {
-            RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
-            if ( !artifactIndex.exists() )
-            {
-                execute( configuration, indexPath );
-            }
-        }
-        catch ( RepositoryIndexException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-    }
-
-    private void execute( Configuration configuration, File indexPath )
-        throws TaskExecutionException
-    {
-        long time = System.currentTimeMillis();
-        getLogger().info( "Starting repository indexing process" );
-
-        RepositoryArtifactIndex index = indexFactory.createStandardIndex( indexPath );
-
-        try
-        {
-            Collection keys;
-            if ( index.exists() )
-            {
-                keys = index.getAllRecordKeys();
-            }
-            else
-            {
-                keys = Collections.EMPTY_LIST;
-            }
-
-            for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
-            {
-                RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
-                if ( repositoryConfiguration.isIndexed() )
-                {
-                    List blacklistedPatterns = new ArrayList();
-                    if ( repositoryConfiguration.getBlackListPatterns() != null )
-                    {
-                        blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() );
-                    }
-                    if ( configuration.getGlobalBlackListPatterns() != null )
-                    {
-                        blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() );
-                    }
-                    boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots();
-
-                    ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
-                    ReportingDatabase reporter = reportExecutor.getReportDatabase( repository, reportGroup );
-
-                    // keep original value in case there is another process under way
-                    long origStartTime = reporter.getStartTime();
-                    reporter.setStartTime( System.currentTimeMillis() );
-
-                    // Discovery process
-                    String layoutProperty = repositoryConfiguration.getLayout();
-                    ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
-                    AndArtifactFilter filter = new AndArtifactFilter();
-                    filter.add( new IndexRecordExistsArtifactFilter( keys ) );
-                    if ( !includeSnapshots )
-                    {
-                        filter.add( new SnapshotArtifactFilter() );
-                    }
-
-                    // Save some memory by not tracking paths we won't use
-                    // TODO: Plexus CDC should be able to inject this configuration
-                    discoverer.setTrackOmittedPaths( false );
-
-                    getLogger().info( "Searching repository " + repositoryConfiguration.getName() );
-                    List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
-                    if ( !artifacts.isEmpty() )
-                    {
-                        getLogger().info( "Discovered " + artifacts.size() + " unindexed artifacts" );
-
-                        // Work through these in batches, then flush the project cache.
-                        for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
-                        {
-                            int end = j + ARTIFACT_BUFFER_SIZE;
-                            List currentArtifacts =
-                                artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
-                            // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
-                            // run the reports. Done intermittently to avoid losing track of what is indexed since
-                            // that is what the filter is based on.
-                            reportExecutor.runArtifactReports( reportGroup, currentArtifacts, repository );
-
-                            index.indexArtifacts( currentArtifacts, recordFactory );
-
-                            // MRM-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
-                            // around that. TODO: remove when it is configurable
-                            flushProjectBuilderCacheHack();
-                        }
-                    }
-
-                    MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter );
-
-                    MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers
-                        .get( layoutProperty );
-                    List metadata =
-                        metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter );
-
-                    if ( !metadata.isEmpty() )
-                    {
-                        getLogger().info( "Discovered " + metadata.size() + " unprocessed metadata files" );
-
-                        // run the reports
-                        reportExecutor.runMetadataReports( reportGroup, metadata, repository );
-                    }
-
-                    reporter.setStartTime( origStartTime );
-                }
-            }
-        }
-        catch ( RepositoryIndexException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-        catch ( DiscovererException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-        catch ( ReportingStoreException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-
-        time = System.currentTimeMillis() - time;
-        lastIndexingTime = System.currentTimeMillis();
-        getLogger().info( "Finished repository indexing process in " + time + "ms" );
-    }
-
-    /**
-     * @todo remove when no longer needed (MRM-142)
-     * @plexus.requirement
-     */
-    private MavenProjectBuilder projectBuilder;
-
-    private void flushProjectBuilderCacheHack()
-    {
-        try
-        {
-            if ( projectBuilder != null )
-            {
-                getLogger().info( "projectBuilder is type " + projectBuilder.getClass().getName() );
-
-                java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
-                f.setAccessible( true );
-                Map cache = (Map) f.get( projectBuilder );
-                getLogger().info( "projectBuilder.raw is type " + cache.getClass().getName() );
-                cache.clear();
-
-                f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
-                f.setAccessible( true );
-                cache = (Map) f.get( projectBuilder );
-                getLogger().info( "projectBuilder.processed is type " + cache.getClass().getName() );
-                cache.clear();
-            }
-        }
-        catch ( NoSuchFieldException e )
-        {
-            throw new RuntimeException( e );
-        }
-        catch ( IllegalAccessException e )
-        {
-            throw new RuntimeException( e );
-        }
-    }
-}
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java
new file mode 100644 (file)
index 0000000..57d4b68
--- /dev/null
@@ -0,0 +1,41 @@
+package org.apache.maven.archiva.scheduler.task;
+
+/**
+ * DataRefreshTask - task for discovering changes in the repository 
+ * and updating all associated data. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DataRefreshTask
+    implements RepositoryTask
+{
+    private String jobName;
+
+    private String policy;
+
+    public String getJobName()
+    {
+        return jobName;
+    }
+
+    public String getQueuePolicy()
+    {
+        return policy;
+    }
+
+    public void setJobName( String jobName )
+    {
+        this.jobName = jobName;
+    }
+
+    public void setQueuePolicy( String policy )
+    {
+        this.policy = policy;
+    }
+
+    public long getMaxExecutionTime()
+    {
+        return 0;
+    }
+}
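
DataRefreshTask is a plain value object; callers submit it to the "data-refresh" TaskQueue, as runDataRefresh() above does. A minimal sketch of queuing one manually, assuming the queue has already been looked up from the container; the job name is an illustrative value:

import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
import org.apache.maven.archiva.scheduler.task.RepositoryTask;
import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.TaskQueueException;

public class QueueDataRefreshSketch
{
    public static void queueRefresh( TaskQueue dataRefreshQueue )
        throws TaskQueueException
    {
        DataRefreshTask task = new DataRefreshTask();
        task.setJobName( "MANUAL_DATA_REFRESH" );
        task.setQueuePolicy( RepositoryTask.QUEUE_POLICY_SKIP );

        dataRefreshQueue.put( task );
    }
}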
diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java
deleted file mode 100644 (file)
index a4cd2f6..0000000
+++ /dev/null
@@ -1,60 +0,0 @@
-package org.apache.maven.archiva.scheduler.task;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Task for discovering changes in the repository and updating the index accordingly.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class IndexerTask
-    implements RepositoryTask
-{
-    private String jobName;
-
-    private String policy;
-
-    public long getMaxExecutionTime()
-    {
-        return 0;
-    }
-
-    public String getJobName()
-    {
-        return jobName;
-    }
-
-    public String getQueuePolicy()
-    {
-        return policy;
-    }
-
-    public void setQueuePolicy( String policy )
-    {
-        this.policy = policy;
-    }
-
-    public void setJobName( String jobName )
-    {
-        this.jobName = jobName;
-    }
-
-
-}
index 1fe09e4ff1ed6603d703cb0c1d2b6252534ce9d7..cbf418cb416b123528c54a28d9a4d3f281c815b1 100644 (file)
 <component-set>
   <components>
 
+    <!-- TODO: Remove once CDC can handle correct cross-module descriptor creation. -->
+
+    <!-- SNIP:START -->
+
+    <component>
+      <role>org.apache.maven.archiva.common.consumers.Consumer</role>
+      <role-hint>index-artifact</role-hint>
+      <implementation>org.apache.maven.archiva.consumers.IndexArtifactConsumer</implementation>
+      <instantiation-strategy>per-lookup</instantiation-strategy>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+          <field-name>artifactFactory</field-name>
+        </requirement>
+      </requirements>
+    </component>
+
+    <component>
+      <role>org.apache.maven.archiva.common.consumers.Consumer</role>
+      <role-hint>artifact-health</role-hint>
+      <implementation>org.apache.maven.archiva.consumers.ArtifactHealthConsumer</implementation>
+      <instantiation-strategy>per-lookup</instantiation-strategy>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+          <field-name>artifactFactory</field-name>
+        </requirement>
+      </requirements>
+    </component>
+
+    <component>
+      <role>org.apache.maven.archiva.common.consumers.Consumer</role>
+      <role-hint>metadata-health</role-hint>
+      <implementation>org.apache.maven.archiva.consumers.RepositoryMetadataHealthConsumer</implementation>
+      <instantiation-strategy>per-lookup</instantiation-strategy>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+          <field-name>artifactFactory</field-name>
+        </requirement>
+      </requirements>
+    </component>
+
+    <!-- SNIP:END -->
+
+    <component>
+      <role>org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers</role>
+      <implementation>org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers</implementation>
+      <description>Mutable list of consumers for the Data Refresh.</description>
+      <configuration>
+        <consumer-names>
+          <consumer-name>index-artifact</consumer-name>
+          <consumer-name>artifact-health</consumer-name>
+          <consumer-name>metadata-health</consumer-name>
+        </consumer-names>
+      </configuration>
+    </component>
+
     <component>
       <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
-      <role-hint>indexer</role-hint>
+      <role-hint>data-refresh</role-hint>
       <implementation>org.codehaus.plexus.taskqueue.DefaultTaskQueue</implementation>
       <lifecycle-handler>plexus-configurable</lifecycle-handler>
       <configuration>
 
     <component>
       <role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
-      <role-hint>indexer</role-hint>
+      <role-hint>data-refresh</role-hint>
       <implementation>org.codehaus.plexus.taskqueue.execution.ThreadedTaskQueueExecutor</implementation>
       <requirements>
         <requirement>
           <role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
-          <role-hint>indexer</role-hint>
+          <role-hint>data-refresh</role-hint>
         </requirement>
         <requirement>
           <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
-          <role-hint>indexer</role-hint>
+          <role-hint>data-refresh</role-hint>
         </requirement>
       </requirements>
       <configuration>
-        <name>indexer</name>
+        <name>data-refresh</name>
       </configuration>
     </component>
     
diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/AllTests.java b/archiva-core/src/test/java/org/apache/maven/archiva/AllTests.java
new file mode 100644 (file)
index 0000000..e037e09
--- /dev/null
@@ -0,0 +1,44 @@
+package org.apache.maven.archiva;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - added to allow IDE users to pull all tests into their tool. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva" );
+        //$JUnit-BEGIN$
+        suite.addTest( org.apache.maven.archiva.repositories.AllTests.suite() );
+        suite.addTest( org.apache.maven.archiva.scheduler.executors.AllTests.suite() );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/LegacyRepositoryConverterTest.java b/archiva-core/src/test/java/org/apache/maven/archiva/LegacyRepositoryConverterTest.java
deleted file mode 100644 (file)
index bd13a7d..0000000
+++ /dev/null
@@ -1,44 +0,0 @@
-package org.apache.maven.archiva;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.conversion.LegacyRepositoryConverter;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-
-/**
- * @author Jason van Zyl
- */
-public class LegacyRepositoryConverterTest
-    extends PlexusTestCase
-{
-    public void testLegacyRepositoryConversion()
-        throws Exception
-    {
-        File legacyRepositoryDirectory = getTestFile( "src/test/maven-1.x-repository" );
-
-        File repositoryDirectory = getTestFile( "target/maven-2.x-repository" );
-
-        LegacyRepositoryConverter rm = (LegacyRepositoryConverter) lookup( LegacyRepositoryConverter.ROLE );
-
-        rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, null, true );
-    }
-}
diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/repositories/AllTests.java b/archiva-core/src/test/java/org/apache/maven/archiva/repositories/AllTests.java
new file mode 100644 (file)
index 0000000..0b82640
--- /dev/null
@@ -0,0 +1,43 @@
+package org.apache.maven.archiva.repositories;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.repositories" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( DefaultActiveManagedRepositoriesTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
index a535307e2b62863e71d9c679aaa9f67d949693fd..f4e89eadfccbbf31b2fb20846c1b162ec9379db2 100644 (file)
@@ -19,15 +19,15 @@ package org.apache.maven.archiva.repositories;
  * under the License.
  */
 
-import org.apache.maven.archiva.artifact.ManagedArtifact;
-import org.apache.maven.archiva.artifact.ManagedEjbArtifact;
-import org.apache.maven.archiva.artifact.ManagedJavaArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedEjbArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedJavaArtifact;
 import org.codehaus.plexus.PlexusTestCase;
 
 /**
  * DefaultActiveManagedRepositoriesTest
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class DefaultActiveManagedRepositoriesTest
diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/AllTests.java b/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/AllTests.java
new file mode 100644 (file)
index 0000000..9fdfcc1
--- /dev/null
@@ -0,0 +1,43 @@
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.scheduler.executors" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( DataRefreshExecutorTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java b/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java
new file mode 100644 (file)
index 0000000..ad99007
--- /dev/null
@@ -0,0 +1,75 @@
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.io.File;
+
+/**
+ * DataRefreshExecutorTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DataRefreshExecutorTest
+    extends PlexusTestCase
+{
+    private TaskExecutor taskExecutor;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "data-refresh" );
+
+        ArchivaConfiguration archivaConfiguration =
+            (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
+        Configuration configuration = archivaConfiguration.getConfiguration();
+
+        File indexPath = new File( configuration.getIndexPath() );
+        if ( indexPath.exists() )
+        {
+            FileUtils.deleteDirectory( indexPath );
+        }
+    }
+
+    public void testExecutor()
+        throws TaskExecutionException
+    {
+        taskExecutor.executeTask( new TestDataRefreshTask() );
+    }
+
+    class TestDataRefreshTask
+        extends DataRefreshTask
+    {
+        public String getJobName()
+        {
+            return "TestDataRefresh";
+        }
+    }
+}
diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java b/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java
deleted file mode 100644 (file)
index 8729e0c..0000000
+++ /dev/null
@@ -1,75 +0,0 @@
-package org.apache.maven.archiva.scheduler.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.File;
-
-/**
- * IndexerTaskExecutorTest
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class IndexerTaskExecutorTest
-    extends PlexusTestCase
-{
-    private TaskExecutor taskExecutor;
-
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "indexer" );
-
-        ArchivaConfiguration archivaConfiguration =
-            (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
-        Configuration configuration = archivaConfiguration.getConfiguration();
-
-        File indexPath = new File( configuration.getIndexPath() );
-        if ( indexPath.exists() )
-        {
-            FileUtils.deleteDirectory( indexPath );
-        }
-    }
-
-    public void testIndexer()
-        throws TaskExecutionException
-    {
-        taskExecutor.executeTask( new TestIndexerTask() );
-    }
-
-    class TestIndexerTask
-        extends IndexerTask
-    {
-        public String getJobName()
-        {
-            return "TestIndexer";
-        }
-    }
-}
diff --git a/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.xml b/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.xml
new file mode 100644 (file)
index 0000000..5a7d810
--- /dev/null
@@ -0,0 +1,90 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.codehaus.plexus.registry.Registry</role>
+      <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
+      <role-hint>commons-configuration</role-hint>
+      <configuration>
+        <properties>
+          <xml fileName="${basedir}/src/test/conf/archiva.xml"
+               config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
+        </properties>
+      </configuration>
+    </component>
+    
+    <component>
+      <role>org.codehaus.plexus.jdo.JdoFactory</role>
+      <role-hint>archiva</role-hint>
+      <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+      
+      <configuration>
+        <!-- Database Configuration -->
+        <driverName>org.hsqldb.jdbcDriver</driverName>
+        <url>jdbc:hsqldb:mem:TESTDB</url>
+        <userName>sa</userName>
+        <password></password>
+        
+        <!-- JPOX and JDO configuration -->
+        <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+        <otherProperties>
+          <property>
+            <name>javax.jdo.PersistenceManagerFactoryClass</name>
+            <value>org.jpox.PersistenceManagerFactoryImpl</value>
+          </property>
+          <property>
+            <name>org.jpox.autoCreateSchema</name>
+            <value>true</value>
+          </property>
+          <property>
+            <name>org.jpox.validateTables</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateConstraints</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateColumns</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.autoStartMechanism</name>
+            <value>None</value>
+          </property>
+          <property>
+            <name>org.jpox.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.poid.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.rdbms.dateTimezone</name>
+            <value>JDK_DEFAULT_TIMEZONE</value>
+          </property>
+        </otherProperties>
+      </configuration>
+    </component>    
+  </components>
+</component-set>
diff --git a/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.xml b/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.xml
deleted file mode 100644 (file)
index 34d1502..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<component-set>
-  <components>
-    <component>
-      <role>org.codehaus.plexus.registry.Registry</role>
-      <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
-      <role-hint>commons-configuration</role-hint>
-      <configuration>
-        <properties>
-          <xml fileName="${basedir}/src/test/conf/archiva.xml"
-               config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
-        </properties>
-      </configuration>
-    </component>
-  </components>
-</component-set>
index 90ccb9facf0c9e0fa9626b2c74b8570314b60954..8a96e39c58646b7f5dc27f97851c8454f445b559 100755 (executable)
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <artifactId>archiva-discoverer</artifactId>
-  <name>Archiva Artifact Discoverer</name>
+  <name>Archiva Discoverer</name>
   <dependencies>
+    <dependency>
+      <groupId>org.apache.maven.archiva</groupId>
+      <artifactId>archiva-common</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
       <artifactId>plexus-utils</artifactId>
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java
deleted file mode 100644 (file)
index f3002ec..0000000
+++ /dev/null
@@ -1,117 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Base class for artifact discoverers.
- *
- * @author John Casey
- * @author Brett Porter
- */
-public abstract class AbstractArtifactDiscoverer
-    extends AbstractDiscoverer
-    implements ArtifactDiscoverer
-{
-    /**
-     * Standard patterns to exclude from discovery as they are not artifacts.
-     */
-    private static final String[] STANDARD_DISCOVERY_EXCLUDES = {"bin/**", "reports/**", ".index", ".reports/**",
-        ".maven/**", "**/*.md5", "**/*.MD5", "**/*.sha1", "**/*.SHA1", "**/*snapshot-version", "*/website/**",
-        "*/licenses/**", "*/licences/**", "**/.htaccess", "**/*.html", "**/*.asc", "**/*.txt", "**/*.xml", "**/README*",
-        "**/CHANGELOG*", "**/KEYS*"};
-
-    private List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns )
-    {
-        return scanForArtifactPaths( repositoryBase, blacklistedPatterns, null, STANDARD_DISCOVERY_EXCLUDES );
-    }
-
-    public List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter )
-        throws DiscovererException
-    {
-        if ( !"file".equals( repository.getProtocol() ) )
-        {
-            throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
-        }
-
-        File repositoryBase = new File( repository.getBasedir() );
-
-        List artifacts = new ArrayList();
-
-        if ( repositoryBase.exists() )
-        {
-            List artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns );
-
-            for ( Iterator i = artifactPaths.iterator(); i.hasNext(); )
-            {
-                String path = (String) i.next();
-    
-                try
-                {
-                    Artifact artifact = buildArtifactFromPath( path, repository );
-    
-                    if ( filter.include( artifact ) )
-                    {
-                        artifacts.add( artifact );
-                    }
-                    else
-                    {
-                        addExcludedPath( path, "Omitted by filter" );
-                    }
-                }
-                catch ( DiscovererException e )
-                {
-                    addKickedOutPath( path, e.getMessage() );
-                }
-            }
-        }
-        return artifacts;
-    }
-
-    /**
-     * Returns an artifact object that is represented by the specified path in a repository
-     *
-     * @param path       The path that is pointing to an artifact
-     * @param repository The repository of the artifact
-     * @return Artifact
-     * @throws DiscovererException when the specified path does correspond to an artifact
-     */
-    public Artifact buildArtifactFromPath( String path, ArtifactRepository repository )
-        throws DiscovererException
-    {
-        Artifact artifact = buildArtifact( path );
-
-        if ( artifact != null )
-        {
-            artifact.setRepository( repository );
-            artifact.setFile( new File( repository.getBasedir(), path ) );
-        }
-
-        return artifact;
-    }
-}
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java
deleted file mode 100644 (file)
index 7e0ee4f..0000000
+++ /dev/null
@@ -1,158 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.DirectoryScanner;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Base class for the artifact and metadata discoverers.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractDiscoverer
-    extends AbstractLogEnabled
-    implements Discoverer
-{
-    private List kickedOutPaths = new ArrayList();
-
-    /**
-     * @plexus.requirement
-     */
-    protected ArtifactFactory artifactFactory;
-
-    private static final String[] EMPTY_STRING_ARRAY = new String[0];
-
-    private List excludedPaths = new ArrayList();
-
-    /**
-     * @plexus.configuration default-value="true"
-     */
-    private boolean trackOmittedPaths;
-
-    /**
-     * Add a path to the list of files that were kicked out due to being invalid.
-     *
-     * @param path   the path to add
-     * @param reason the reason why the path is being kicked out
-     */
-    protected void addKickedOutPath( String path, String reason )
-    {
-        if ( trackOmittedPaths )
-        {
-            kickedOutPaths.add( new DiscovererPath( path, reason ) );
-        }
-    }
-
-    /**
-     * Add a path to the list of files that were excluded.
-     *
-     * @param path   the path to add
-     * @param reason the reason why the path is excluded
-     */
-    protected void addExcludedPath( String path, String reason )
-    {
-        excludedPaths.add( new DiscovererPath( path, reason ) );
-    }
-
-    /**
-     * Returns an iterator for the list if DiscovererPaths that were found to not represent a searched object
-     *
-     * @return Iterator for the DiscovererPath List
-     */
-    public Iterator getKickedOutPathsIterator()
-    {
-        assert trackOmittedPaths;
-        return kickedOutPaths.iterator();
-    }
-
-    protected List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns, String[] includes,
-                                         String[] excludes )
-    {
-        List allExcludes = new ArrayList();
-        allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
-        if ( excludes != null )
-        {
-            allExcludes.addAll( Arrays.asList( excludes ) );
-        }
-        if ( blacklistedPatterns != null )
-        {
-            allExcludes.addAll( blacklistedPatterns );
-        }
-
-        DirectoryScanner scanner = new DirectoryScanner();
-
-        scanner.setBasedir( repositoryBase );
-
-        if ( includes != null )
-        {
-            scanner.setIncludes( includes );
-        }
-        scanner.setExcludes( (String[]) allExcludes.toArray( EMPTY_STRING_ARRAY ) );
-
-        // TODO: Correct for extremely large repositories (artifact counts over 200,000 entries)
-        scanner.scan();
-
-        if ( trackOmittedPaths )
-        {
-            for ( Iterator files = Arrays.asList( scanner.getExcludedFiles() ).iterator(); files.hasNext(); )
-            {
-                String path = files.next().toString();
-
-                excludedPaths.add( new DiscovererPath( path, "Artifact was in the specified list of exclusions" ) );
-            }
-        }
-
-        // TODO: this could be a part of the scanner
-        List includedPaths = new ArrayList();
-        for ( Iterator files = Arrays.asList( scanner.getIncludedFiles() ).iterator(); files.hasNext(); )
-        {
-            String path = files.next().toString();
-
-            includedPaths.add( path );
-        }
-
-        return includedPaths;
-    }
-
-    /**
-     * Returns an iterator for the list if DiscovererPaths that were not processed because they are explicitly excluded
-     *
-     * @return Iterator for the DiscovererPath List
-     */
-    public Iterator getExcludedPathsIterator()
-    {
-        assert trackOmittedPaths;
-        return excludedPaths.iterator();
-    }
-
-    public void setTrackOmittedPaths( boolean trackOmittedPaths )
-    {
-        this.trackOmittedPaths = trackOmittedPaths;
-    }
-}
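The removed base class above delegated scanning to plexus-utils' DirectoryScanner, collecting every matching relative path up front; the DefaultDiscoverer added later in this commit switches to the listener-driven DirectoryWalker instead. For comparison, the scanner-based core amounts to this sketch (class name hypothetical):

import java.io.File;

import org.codehaus.plexus.util.DirectoryScanner;

public class ScanSketch
{
    public static String[] scan( File repositoryBase, String[] includes, String[] excludes )
    {
        DirectoryScanner scanner = new DirectoryScanner();
        scanner.setBasedir( repositoryBase );
        scanner.setIncludes( includes );   // may be null: include everything
        scanner.setExcludes( excludes );
        scanner.scan();                    // walks the whole tree before returning
        return scanner.getIncludedFiles(); // relative paths of everything that matched
    }
}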
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/ArtifactDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/ArtifactDiscoverer.java
deleted file mode 100644 (file)
index 50873b2..0000000
+++ /dev/null
@@ -1,66 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.List;
-
-/**
- * Interface for implementation that can discover artifacts within a repository.
- *
- * @author John Casey
- * @author Brett Porter
- * @todo do we want blacklisted patterns in another form? Part of the object construction?
- * @todo should includeSnapshots be configuration on the component? If not, should the methods be changed to include alternates for both possibilities (discoverReleaseArtifacts, discoverReleaseAndSnapshotArtifacts)?
- * @todo instead of a returned list, should a listener be passed in?
- */
-public interface ArtifactDiscoverer
-    extends Discoverer
-{
-    String ROLE = ArtifactDiscoverer.class.getName();
-
-    /**
-     * Discover artifacts in the repository. Only artifacts added since the last attempt at discovery will be found.
-     * This process guarantees never to miss an artifact, however it is possible that an artifact will be received twice
-     * consecutively even if unchanged, so any users of this list must handle such a situation gracefully.
-     *
-     * @param repository          the location of the repository
-     * @param blacklistedPatterns pattern that lists any files to prevent from being included when scanning
-     * @param filter              filter for artifacts to include in the discovered list
-     * @return the list of artifacts discovered
-     * @throws DiscovererException if there was an unrecoverable problem discovering artifacts or recording progress
-     */
-    List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter )
-        throws DiscovererException;
-
-    /**
-     * Build an artifact from a path in the repository
-     *
-     * @param path the path
-     * @return the artifact
-     * @throws DiscovererException if the file is not a valid artifact
-     * @todo this should be in maven-artifact
-     */
-    Artifact buildArtifact( String path )
-        throws DiscovererException;
-}
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java
deleted file mode 100644 (file)
index f085602..0000000
+++ /dev/null
@@ -1,200 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.artifact.Artifact;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the new repository layout (Maven 2.0+).
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="default"
- */
-public class DefaultArtifactDiscoverer
-    extends AbstractArtifactDiscoverer
-{
-    /**
-     * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
-     */
-    public Artifact buildArtifact( String path )
-        throws DiscovererException
-    {
-        List pathParts = new ArrayList();
-        StringTokenizer st = new StringTokenizer( path, "/\\" );
-        while ( st.hasMoreTokens() )
-        {
-            pathParts.add( st.nextToken() );
-        }
-
-        Collections.reverse( pathParts );
-
-        Artifact artifact;
-        if ( pathParts.size() >= 4 )
-        {
-            // maven 2.x path
-
-            // the actual artifact filename.
-            String filename = (String) pathParts.remove( 0 );
-
-            // the next one is the version.
-            String version = (String) pathParts.remove( 0 );
-
-            // the next one is the artifactId.
-            String artifactId = (String) pathParts.remove( 0 );
-
-            // the remaining are the groupId.
-            Collections.reverse( pathParts );
-            String groupId = StringUtils.join( pathParts.iterator(), "." );
-
-            String remainingFilename = filename;
-            if ( remainingFilename.startsWith( artifactId + "-" ) )
-            {
-                remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
-
-                String classifier = null;
-
-                // TODO: use artifact handler, share with legacy discoverer
-                String type;
-                if ( remainingFilename.endsWith( ".tar.gz" ) )
-                {
-                    type = "distribution-tgz";
-                    remainingFilename =
-                        remainingFilename.substring( 0, remainingFilename.length() - ".tar.gz".length() );
-                }
-                else if ( remainingFilename.endsWith( ".zip" ) )
-                {
-                    type = "distribution-zip";
-                    remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
-                }
-                else if ( remainingFilename.endsWith( "-test-sources.jar" ) )
-                {
-                    type = "java-source";
-                    classifier = "test-sources";
-                    remainingFilename =
-                        remainingFilename.substring( 0, remainingFilename.length() - "-test-sources.jar".length() );
-                }
-                else if ( remainingFilename.endsWith( "-sources.jar" ) )
-                {
-                    type = "java-source";
-                    classifier = "sources";
-                    remainingFilename =
-                        remainingFilename.substring( 0, remainingFilename.length() - "-sources.jar".length() );
-                }
-                else
-                {
-                    int index = remainingFilename.lastIndexOf( "." );
-                    if ( index >= 0 )
-                    {
-                        type = remainingFilename.substring( index + 1 );
-                        remainingFilename = remainingFilename.substring( 0, index );
-                    }
-                    else
-                    {
-                        throw new DiscovererException( "Path filename does not have an extension" );
-                    }
-                }
-
-                Artifact result;
-                if ( classifier == null )
-                {
-                    result =
-                        artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
-                }
-                else
-                {
-                    result =
-                        artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
-                }
-
-                if ( result.isSnapshot() )
-                {
-                    // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
-                    int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
-                    if ( classifierIndex >= 0 )
-                    {
-                        classifier = remainingFilename.substring( classifierIndex + 1 );
-                        remainingFilename = remainingFilename.substring( 0, classifierIndex );
-                        result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
-                                                                               type, classifier );
-                    }
-                    else
-                    {
-                        result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
-                                                                 Artifact.SCOPE_RUNTIME, type );
-                    }
-
-                    // poor encapsulation requires we do this to populate base version
-                    if ( !result.isSnapshot() )
-                    {
-                        throw new DiscovererException( "Failed to create a snapshot artifact: " + result );
-                    }
-                    else if ( !result.getBaseVersion().equals( version ) )
-                    {
-                        throw new DiscovererException(
-                            "Built snapshot artifact base version does not match path version: " + result +
-                                "; should have been version: " + version );
-                    }
-                    else
-                    {
-                        artifact = result;
-                    }
-                }
-                else if ( !remainingFilename.startsWith( version ) )
-                {
-                    throw new DiscovererException( "Built artifact version does not match path version" );
-                }
-                else if ( !remainingFilename.equals( version ) )
-                {
-                    if ( remainingFilename.charAt( version.length() ) == '-' )
-                    {
-                        classifier = remainingFilename.substring( version.length() + 1 );
-                        artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
-                                                                                 classifier );
-                    }
-                    else
-                    {
-                        throw new DiscovererException( "Path version does not corresspond to an artifact version" );
-                    }
-                }
-                else
-                {
-                    artifact = result;
-                }
-            }
-            else
-            {
-                throw new DiscovererException( "Path filename does not correspond to an artifact" );
-            }
-        }
-        else
-        {
-            throw new DiscovererException( "Path is too short to build an artifact from" );
-        }
-
-        return artifact;
-    }
-}
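For context on what is being removed: buildArtifact above tokenized a Maven 2 layout path from the back, peeling off the filename, the version directory and the artifactId, then joining the remainder into the groupId. A stripped-down, standalone sketch of just that decomposition, using an illustrative path (the full method then created the Artifact through the Plexus-injected ArtifactFactory and handled the snapshot and classifier cases):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.StringTokenizer;

public class PathDecompositionSketch
{
    public static void main( String[] args )
    {
        String path = "org/apache/maven/archiva/archiva-common/1.0/archiva-common-1.0.jar";

        List pathParts = new ArrayList();
        StringTokenizer st = new StringTokenizer( path, "/\\" );
        while ( st.hasMoreTokens() )
        {
            pathParts.add( st.nextToken() );
        }
        Collections.reverse( pathParts );

        String filename = (String) pathParts.remove( 0 );   // archiva-common-1.0.jar
        String version = (String) pathParts.remove( 0 );    // 1.0 (taken from the directory name)
        String artifactId = (String) pathParts.remove( 0 ); // archiva-common

        // The remaining directories, restored to original order, form the groupId.
        Collections.reverse( pathParts );
        StringBuffer groupId = new StringBuffer();
        for ( int i = 0; i < pathParts.size(); i++ )
        {
            if ( i > 0 )
            {
                groupId.append( '.' );
            }
            groupId.append( pathParts.get( i ) );
        }

        // Prints: org.apache.maven.archiva:archiva-common:1.0 <- archiva-common-1.0.jar
        System.out.println( groupId + ":" + artifactId + ":" + version + " <- " + filename );
    }
}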
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java
new file mode 100644 (file)
index 0000000..bd5e4fd
--- /dev/null
@@ -0,0 +1,178 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.Consumer;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.DirectoryWalker;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Discoverer Implementation.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @plexus.component role="org.apache.maven.archiva.discoverer.Discoverer"
+ */
+public class DefaultDiscoverer
+    extends AbstractLogEnabled
+    implements Discoverer
+{
+    /**
+     * Standard patterns to exclude from discovery as they are usually noise.
+     */
+    private static final String[] STANDARD_DISCOVERY_EXCLUDES = {
+        "bin/**",
+        "reports/**",
+        ".index",
+        ".reports/**",
+        ".maven/**",
+        "**/*snapshot-version",
+        "*/website/**",
+        "*/licences/**",
+        "**/.htaccess",
+        "**/*.html",
+        "**/*.txt",
+        "**/README*",
+        "**/CHANGELOG*",
+        "**/KEYS*" };
+
+    public DefaultDiscoverer()
+    {
+    }
+
+    public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots )
+        throws DiscovererException
+    {
+        return walkRepository( repository, consumers, includeSnapshots, 0, null, null );
+    }
+
+    public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers,
+                                                boolean includeSnapshots, long onlyModifiedAfterTimestamp,
+                                                List extraFileExclusions, List extraFileInclusions )
+        throws DiscovererException
+    {
+        // Sanity Check
+
+        if ( repository == null )
+        {
+            throw new IllegalArgumentException( "Unable to operate on a null repository." );
+        }
+
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            throw new UnsupportedOperationException( "Only filesystem repositories are supported." );
+        }
+
+        File repositoryBase = new File( repository.getBasedir() );
+
+        if ( !repositoryBase.exists() )
+        {
+            throw new UnsupportedOperationException( "Unable to scan a repository, directory "
+                + repositoryBase.getAbsolutePath() + " does not exist." );
+        }
+
+        if ( !repositoryBase.isDirectory() )
+        {
+            throw new UnsupportedOperationException( "Unable to scan a repository, path "
+                + repositoryBase.getAbsolutePath() + " is not a directory." );
+        }
+
+        // Setup Includes / Excludes.
+
+        List allExcludes = new ArrayList();
+        List allIncludes = new ArrayList();
+
+        // Exclude all of the SCM patterns.
+        allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
+
+        // Exclude all of the archiva noise patterns.
+        allExcludes.addAll( Arrays.asList( STANDARD_DISCOVERY_EXCLUDES ) );
+
+        if ( !includeSnapshots )
+        {
+            allExcludes.add( "**/*-SNAPSHOT*" );
+        }
+
+        if ( extraFileExclusions != null )
+        {
+            allExcludes.addAll( extraFileExclusions );
+        }
+
+        Iterator it = consumers.iterator();
+        while ( it.hasNext() )
+        {
+            Consumer consumer = (Consumer) it.next();
+
+            /* NOTE: Do not insert the consumer exclusion patterns here.
+             * Exclusion patterns are handled by RepositoryScanner.wantsFile(Consumer, String)
+             * 
+             * addUniqueElements( consumer.getExcludePatterns(), allExcludes );
+             */
+            addUniqueElements( consumer.getIncludePatterns(), allIncludes );
+        }
+
+        if ( extraFileInclusions != null )
+        {
+            allIncludes.addAll( extraFileInclusions );
+        }
+
+        // Setup Directory Walker
+
+        DirectoryWalker dirWalker = new DirectoryWalker();
+
+        dirWalker.setBaseDir( repositoryBase );
+
+        dirWalker.setIncludes( allIncludes );
+        dirWalker.setExcludes( allExcludes );
+
+        // Setup the Scan Instance
+        RepositoryScanner repoScanner = new RepositoryScanner( repository, consumers );
+        repoScanner.setOnlyModifiedAfterTimestamp( onlyModifiedAfterTimestamp );
+
+        repoScanner.setLogger( getLogger() );
+        dirWalker.addDirectoryWalkListener( repoScanner );
+
+        // Execute scan.
+        dirWalker.scan();
+
+        return repoScanner.getStatistics();
+    }
+
+    private void addUniqueElements( List fromList, List toList )
+    {
+        Iterator itFrom = fromList.iterator();
+        while ( itFrom.hasNext() )
+        {
+            Object o = itFrom.next();
+            if ( !toList.contains( o ) )
+            {
+                toList.add( o );
+            }
+        }
+    }
+}
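The new implementation assembles the include patterns from the consumers themselves, applies the standard exclusions, and hands the walk off to a RepositoryScanner listener. A rough usage sketch, assuming a Discoverer, an ArtifactRepository and a Consumer are already available (class and method names below are illustrative, not part of this commit):

import java.util.Collections;
import java.util.List;

import org.apache.maven.archiva.common.consumers.Consumer;
import org.apache.maven.archiva.discoverer.Discoverer;
import org.apache.maven.archiva.discoverer.DiscovererException;
import org.apache.maven.archiva.discoverer.DiscovererStatistics;
import org.apache.maven.artifact.repository.ArtifactRepository;

public class WalkRepositorySketch
{
    public static DiscovererStatistics walk( Discoverer discoverer, ArtifactRepository repository, Consumer consumer )
        throws DiscovererException
    {
        List consumers = Collections.singletonList( consumer );

        // Convenience form: skip snapshots, no timestamp cut-off, no extra include/exclude patterns.
        DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, false );

        // The full form allows an incremental walk, e.g.:
        // discoverer.walkRepository( repository, consumers, false, previousScanTimestamp, null, null );

        return stats;
    }
}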
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java
deleted file mode 100644 (file)
index f979723..0000000
+++ /dev/null
@@ -1,226 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * This class gets all the paths that contain the metadata files.
- *
- * @plexus.component role="org.apache.maven.archiva.discoverer.MetadataDiscoverer" role-hint="default"
- */
-public class DefaultMetadataDiscoverer
-    extends AbstractDiscoverer
-    implements MetadataDiscoverer
-{
-    /**
-     * Standard patterns to include in discovery of metadata files.
-     *
-     * @todo Note that only the remote format is supported at this time: you cannot search local repository metadata due
-     * to the way it is later loaded in the searchers. Review code using pathOfRemoteMetadata. IS there any value in
-     * searching the local metadata in the first place though?
-     */
-    private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"};
-
-    public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter filter )
-        throws DiscovererException
-    {
-        if ( !"file".equals( repository.getProtocol() ) )
-        {
-            throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
-        }
-
-        List metadataFiles = new ArrayList();
-
-        File repositoryBase = new File( repository.getBasedir() );
-        if ( repositoryBase.exists() )
-        {
-            List metadataPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns,
-                                                       STANDARD_DISCOVERY_INCLUDES, null );
-
-            for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
-            {
-                String metadataPath = (String) i.next();
-                try
-                {
-                    RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), metadataPath );
-                    File f = new File( repository.getBasedir(), metadataPath );
-                    if ( filter.include( metadata, f.lastModified() ) )
-                    {
-                        metadataFiles.add( metadata );
-                    }
-                    else
-                    {
-                        addExcludedPath( metadataPath, "Metadata excluded by filter" );
-                    }
-                }
-                catch ( DiscovererException e )
-                {
-                    addKickedOutPath( metadataPath, e.getMessage() );
-                }
-            }
-        }
-        return metadataFiles;
-    }
-
-    public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
-        throws DiscovererException
-    {
-        return discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
-    }
-
-    private RepositoryMetadata buildMetadata( String repo, String metadataPath )
-        throws DiscovererException
-    {
-        Metadata m;
-        File f = new File( repo, metadataPath );
-        try
-        {
-            Reader reader = new FileReader( f );
-            MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
-
-            m = metadataReader.read( reader );
-        }
-        catch ( XmlPullParserException e )
-        {
-            throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
-        }
-        catch ( IOException e )
-        {
-            throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
-        }
-
-        RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
-
-        if ( repositoryMetadata == null )
-        {
-            throw new DiscovererException( "Unable to build a repository metadata from path" );
-        }
-
-        return repositoryMetadata;
-    }
-
-    /**
-     * Builds a RepositoryMetadata object from a Metadata object and its path.
-     *
-     * @param m            Metadata
-     * @param metadataPath path
-     * @return RepositoryMetadata if the parameters represent one; null if not
-     * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc)
-     */
-    private RepositoryMetadata buildMetadata( Metadata m, String metadataPath )
-    {
-        String metaGroupId = m.getGroupId();
-        String metaArtifactId = m.getArtifactId();
-        String metaVersion = m.getVersion();
-
-        // check if the groupId, artifactId and version is in the
-        // metadataPath
-        // parse the path, in reverse order
-        List pathParts = new ArrayList();
-        StringTokenizer st = new StringTokenizer( metadataPath, "/\\" );
-        while ( st.hasMoreTokens() )
-        {
-            pathParts.add( st.nextToken() );
-        }
-
-        Collections.reverse( pathParts );
-        // remove the metadata file
-        pathParts.remove( 0 );
-        Iterator it = pathParts.iterator();
-        String tmpDir = (String) it.next();
-
-        Artifact artifact = null;
-        if ( StringUtils.isNotEmpty( metaVersion ) )
-        {
-            artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
-        }
-
-        // snapshotMetadata
-        RepositoryMetadata metadata = null;
-        if ( tmpDir != null && tmpDir.equals( metaVersion ) )
-        {
-            if ( artifact != null )
-            {
-                metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-            }
-        }
-        else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
-        {
-            // artifactMetadata
-            if ( artifact != null )
-            {
-                metadata = new ArtifactRepositoryMetadata( artifact );
-            }
-            else
-            {
-                artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
-                metadata = new ArtifactRepositoryMetadata( artifact );
-            }
-        }
-        else
-        {
-            String groupDir = "";
-            int ctr = 0;
-            for ( it = pathParts.iterator(); it.hasNext(); )
-            {
-                String path = (String) it.next();
-                if ( ctr == 0 )
-                {
-                    groupDir = path;
-                }
-                else
-                {
-                    groupDir = path + "." + groupDir;
-                }
-                ctr++;
-            }
-
-            // groupMetadata
-            if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
-            {
-                metadata = new GroupRepositoryMetadata( metaGroupId );
-            }
-        }
-
-        return metadata;
-    }
-}
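The heart of the removed metadata discovery was parsing maven-metadata.xml with MetadataXpp3Reader and then classifying the result as group, artifact or snapshot metadata from the path. A minimal standalone sketch of the parsing step only (class name hypothetical):

import java.io.File;
import java.io.FileReader;
import java.io.Reader;

import org.apache.maven.artifact.repository.metadata.Metadata;
import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
import org.codehaus.plexus.util.IOUtil;

public class ReadMetadataSketch
{
    public static Metadata read( File metadataFile )
        throws Exception
    {
        Reader reader = null;
        try
        {
            // Parses a maven-metadata.xml file into its model form.
            reader = new FileReader( metadataFile );
            return new MetadataXpp3Reader().read( reader );
        }
        finally
        {
            IOUtil.close( reader );
        }
    }
}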
index 157557bf2a234f64f54eadb54dca39dd92097604..4f670c4e862a5130f73459f0d4949c9633d45070 100644 (file)
@@ -19,26 +19,55 @@ package org.apache.maven.archiva.discoverer;
  * under the License.
  */
 
-import java.util.Iterator;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+import java.util.List;
 
 /**
- * @author Edwin Punzalan
+ * Discoverer - generic discoverer of content in an ArtifactRepository. 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
  */
 public interface Discoverer
 {
+    public static final String ROLE = Discoverer.class.getName();
+    
     /**
-     * Get the list of paths kicked out during the discovery process.
-     *
-     * @return the paths as Strings.
+     * Walk the repository, and report to the consumers the files found.
+     * 
+     * Report changes to the appropriate Consumer.
+     * 
+     * This is just a convenience method to {@link #walkRepository(ArtifactRepository, List, boolean, long, List, List)}
+     * equivalent to calling <code>walkRepository( repository, consumers, includeSnapshots, 0, null, null );</code>
+     * 
+     * @param repository the repository to walk.
+     * @param consumers use the provided list of consumers.
+     * @param includeSnapshots true to include snapshots in the walking of this repository.
+     * @return the statistics for this scan.
+     * @throws DiscovererException if there was a fundamental problem with getting the discoverer started.
      */
-    Iterator getKickedOutPathsIterator();
+    public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots )
+        throws DiscovererException;
 
     /**
-     * Get the list of paths excluded during the discovery process.
-     *
-     * @return the paths as Strings.
+     * Walk the repository, and report to the consumers the files found.
+     * 
+     * Report changes to the appropriate Consumer.
+     * 
+     * @param repository the repository to walk.
+     * @param consumers use the provided list of consumers.
+     * @param includeSnapshots true to include snapshots in the scanning of this repository.
+     * @param onlyModifiedAfterTimestamp Only report to the consumers files that have a {@link File#lastModified()}
+     *          after the provided timestamp.
+     * @param extraFileExclusions an optional list of file exclusions on the walk.
+     * @param extraFileInclusions an optional list of file inclusions on the walk.
+     * @return the statistics for this scan.
+     * @throws DiscovererException if there was a fundamental problem with getting the discoverer started. 
      */
-    Iterator getExcludedPathsIterator();
-
-    void setTrackOmittedPaths( boolean trackOmittedPaths );
+    public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers,
+                                                boolean includeSnapshots, long onlyModifiedAfterTimestamp,
+                                                List extraFileExclusions, List extraFileInclusions )
+        throws DiscovererException;
 }
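With the ROLE constant and the @plexus.component annotation on DefaultDiscoverer, callers can have the walker injected instead of constructing it themselves. A hypothetical consuming component, not part of this commit, would declare:

public class RepositoryScanTask
{
    /**
     * @plexus.requirement
     */
    private Discoverer discoverer;

    // ... and call discoverer.walkRepository( ... ) from its execute method.
}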
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java
deleted file mode 100644 (file)
index b893078..0000000
+++ /dev/null
@@ -1,52 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class DiscovererPath
-{
-    /**
-     * The path discovered.
-     */
-    private final String path;
-
-    /**
-     * A comment about why the path is being processed.
-     */
-    private final String comment;
-
-    public DiscovererPath( String path, String comment )
-    {
-        this.path = path;
-        this.comment = comment;
-    }
-
-    public String getPath()
-    {
-        return path;
-    }
-
-    public String getComment()
-    {
-        return comment;
-    }
-}
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java
new file mode 100644 (file)
index 0000000..28c2186
--- /dev/null
@@ -0,0 +1,198 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.math.NumberUtils;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.util.IOUtil;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Properties;
+
+/**
+ * DiscovererStatistics 
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DiscovererStatistics
+{
+    private static final String PROP_FILES_CONSUMED = "scan.consumed.files";
+
+    private static final String PROP_FILES_INCLUDED = "scan.included.files";
+
+    private static final String PROP_FILES_SKIPPED = "scan.skipped.files";
+
+    private static final String PROP_TIMESTAMP_STARTED = "scan.started.timestamp";
+
+    private static final String PROP_TIMESTAMP_FINISHED = "scan.finished.timestamp";
+
+    protected long timestampStarted = 0;
+
+    protected long timestampFinished = 0;
+
+    protected long filesIncluded = 0;
+
+    protected long filesConsumed = 0;
+
+    protected long filesSkipped = 0;
+
+    private ArtifactRepository repository;
+
+    public DiscovererStatistics( ArtifactRepository repository )
+    {
+        this.repository = repository;
+    }
+
+    public void load( String filename )
+        throws IOException
+    {
+        File repositoryBase = new File( this.repository.getBasedir() );
+
+        File scanProperties = new File( repositoryBase, filename );
+        FileInputStream fis = null;
+        try
+        {
+            Properties props = new Properties();
+            fis = new FileInputStream( scanProperties );
+            props.load( fis );
+
+            timestampFinished = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_FINISHED ), 0 );
+            timestampStarted = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_STARTED ), 0 );
+            filesIncluded = NumberUtils.toLong( props.getProperty( PROP_FILES_INCLUDED ), 0 );
+            filesConsumed = NumberUtils.toLong( props.getProperty( PROP_FILES_CONSUMED ), 0 );
+            filesSkipped = NumberUtils.toLong( props.getProperty( PROP_FILES_SKIPPED ), 0 );
+        }
+        catch ( IOException e )
+        {
+            reset();
+            throw e;
+        }
+        finally
+        {
+            IOUtil.close( fis );
+        }
+    }
+
+    public void save( String filename )
+        throws IOException
+    {
+        Properties props = new Properties();
+        props.setProperty( PROP_TIMESTAMP_FINISHED, String.valueOf( timestampFinished ) );
+        props.setProperty( PROP_TIMESTAMP_STARTED, String.valueOf( timestampStarted ) );
+        props.setProperty( PROP_FILES_INCLUDED, String.valueOf( filesIncluded ) );
+        props.setProperty( PROP_FILES_CONSUMED, String.valueOf( filesConsumed ) );
+        props.setProperty( PROP_FILES_SKIPPED, String.valueOf( filesSkipped ) );
+
+        File repositoryBase = new File( this.repository.getBasedir() );
+        File statsFile = new File( repositoryBase, filename );
+
+        FileOutputStream fos = null;
+        try
+        {
+            fos = new FileOutputStream( statsFile );
+            props.store( fos, "Last Scan Information, managed by Archiva. DO NOT EDIT" );
+            fos.flush();
+        }
+        finally
+        {
+            IOUtil.close( fos );
+        }
+    }
+
+    public void reset()
+    {
+        timestampStarted = 0;
+        timestampFinished = 0;
+        filesIncluded = 0;
+        filesConsumed = 0;
+        filesSkipped = 0;
+    }
+
+    public long getElapsedMilliseconds()
+    {
+        return timestampFinished - timestampStarted;
+    }
+
+    public long getFilesConsumed()
+    {
+        return filesConsumed;
+    }
+
+    public long getFilesIncluded()
+    {
+        return filesIncluded;
+    }
+
+    public ArtifactRepository getRepository()
+    {
+        return repository;
+    }
+
+    public long getTimestampFinished()
+    {
+        return timestampFinished;
+    }
+
+    public long getTimestampStarted()
+    {
+        return timestampStarted;
+    }
+
+    public long getFilesSkipped()
+    {
+        return filesSkipped;
+    }
+
+    public void setTimestampFinished( long timestampFinished )
+    {
+        this.timestampFinished = timestampFinished;
+    }
+
+    public void setTimestampStarted( long timestampStarted )
+    {
+        this.timestampStarted = timestampStarted;
+    }
+
+    public void dump( Logger logger )
+    {
+        logger.info( "----------------------------------------------------" );
+        logger.info( "Scan of Repository: " + repository.getId() );
+        logger.info( "   Started : " + toHumanTimestamp( this.getTimestampStarted() ) );
+        logger.info( "   Finished: " + toHumanTimestamp( this.getTimestampFinished() ) );
+        // TODO: pretty print elapsed time.
+        logger.info( "   Duration: " + this.getElapsedMilliseconds() + "ms" );
+        logger.info( "   Files   : " + this.getFilesIncluded() );
+        logger.info( "   Consumed: " + this.getFilesConsumed() );
+        logger.info( "   Skipped : " + this.getFilesSkipped() );
+    }
+    
+    private String toHumanTimestamp( long timestamp )
+    {
+        SimpleDateFormat dateFormat = new SimpleDateFormat();
+        return dateFormat.format( new Date( timestamp ) );
+    }
+}
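DiscovererStatistics both reports on a scan and can persist it to a properties file inside the repository, which makes incremental scans possible by feeding the previous finish time back into walkRepository. A hedged sketch of that round trip; the statistics file name below is an assumption, not something this commit defines:

import java.io.IOException;
import java.util.List;

import org.apache.maven.archiva.discoverer.Discoverer;
import org.apache.maven.archiva.discoverer.DiscovererException;
import org.apache.maven.archiva.discoverer.DiscovererStatistics;
import org.apache.maven.artifact.repository.ArtifactRepository;

public class IncrementalScanSketch
{
    // Hypothetical properties file name for the persisted statistics.
    private static final String STATS_FILE = ".archiva-scan.properties";

    public static DiscovererStatistics scan( Discoverer discoverer, ArtifactRepository repository, List consumers )
        throws DiscovererException
    {
        DiscovererStatistics previous = new DiscovererStatistics( repository );
        try
        {
            previous.load( STATS_FILE );
        }
        catch ( IOException e )
        {
            // No earlier scan recorded; load() has already reset the counters to zero.
        }

        // Only report files modified since the previous scan finished (0 on the first run).
        DiscovererStatistics stats =
            discoverer.walkRepository( repository, consumers, false, previous.getTimestampFinished(), null, null );

        try
        {
            stats.save( STATS_FILE );
        }
        catch ( IOException e )
        {
            // Not fatal; the scan itself completed.
        }

        return stats;
    }
}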
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java
deleted file mode 100644 (file)
index 13b8080..0000000
+++ /dev/null
@@ -1,291 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the legacy repository layout (Maven 1.x).
- * Method used to build an artifact object using a relative path from a repository base directory.  An artifactId
- * having the words "DEV", "PRE", "RC", "ALPHA", "BETA", "DEBUG", "UNOFFICIAL", "CURRENT", "LATEST", "FCS",
- * "RELEASE", "NIGHTLY", "SNAPSHOT" and "TEST" (not case-sensitive) will most likely make this method fail as
- * they are reserved for version usage.
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="legacy"
- */
-public class LegacyArtifactDiscoverer
-    extends AbstractArtifactDiscoverer
-{
-    /**
-     * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
-     */
-    public Artifact buildArtifact( String path )
-        throws DiscovererException
-    {
-        StringTokenizer tokens = new StringTokenizer( path, "/\\" );
-
-        Artifact result;
-
-        int numberOfTokens = tokens.countTokens();
-
-        if ( numberOfTokens == 3 )
-        {
-            String groupId = tokens.nextToken();
-
-            String type = tokens.nextToken();
-
-            if ( type.endsWith( "s" ) )
-            {
-                type = type.substring( 0, type.length() - 1 );
-
-                // contains artifactId, version, classifier, and extension.
-                String avceGlob = tokens.nextToken();
-
-                //noinspection CollectionDeclaredAsConcreteClass
-                LinkedList avceTokenList = new LinkedList();
-
-                StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
-                while ( avceTokenizer.hasMoreTokens() )
-                {
-                    avceTokenList.addLast( avceTokenizer.nextToken() );
-                }
-
-                String lastAvceToken = (String) avceTokenList.removeLast();
-
-                // TODO: share with other discoverer, use artifact handlers instead
-                if ( lastAvceToken.endsWith( ".tar.gz" ) )
-                {
-                    type = "distribution-tgz";
-
-                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
-
-                    avceTokenList.addLast( lastAvceToken );
-                }
-                else if ( lastAvceToken.endsWith( "sources.jar" ) )
-                {
-                    type = "java-source";
-
-                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
-                    avceTokenList.addLast( lastAvceToken );
-                }
-                else if ( lastAvceToken.endsWith( "javadoc.jar" ) )
-                {
-                    type = "javadoc.jar";
-
-                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
-                    avceTokenList.addLast( lastAvceToken );
-                }
-                else if ( lastAvceToken.endsWith( ".zip" ) )
-                {
-                    type = "distribution-zip";
-
-                    lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
-
-                    avceTokenList.addLast( lastAvceToken );
-                }
-                else
-                {
-                    int extPos = lastAvceToken.lastIndexOf( '.' );
-
-                    if ( extPos > 0 )
-                    {
-                        String ext = lastAvceToken.substring( extPos + 1 );
-                        if ( type.equals( ext ) || "plugin".equals( type ) )
-                        {
-                            lastAvceToken = lastAvceToken.substring( 0, extPos );
-
-                            avceTokenList.addLast( lastAvceToken );
-                        }
-                        else
-                        {
-                            throw new DiscovererException( "Path type does not match the extension" );
-                        }
-                    }
-                    else
-                    {
-                        throw new DiscovererException( "Path filename does not have an extension" );
-                    }
-                }
-
-                // let's discover the version, and whatever's leftover will be either
-                // a classifier, or part of the artifactId, depending on position.
-                // Since version is at the end, we have to move in from the back.
-                Collections.reverse( avceTokenList );
-
-                // TODO: this is obscene - surely a better way?
-                String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" +
-                    "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" +
-                    "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" +
-                    "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" +
-                    "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" +
-                    "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" +
-                    "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
-
-                StringBuffer classifierBuffer = new StringBuffer();
-                StringBuffer versionBuffer = new StringBuffer();
-
-                boolean firstVersionTokenEncountered = false;
-                boolean firstToken = true;
-
-                int tokensIterated = 0;
-                for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
-                {
-                    String token = (String) it.next();
-
-                    boolean tokenIsVersionPart = token.matches( validVersionParts );
-
-                    StringBuffer bufferToUpdate;
-
-                    // NOTE: logic in code is reversed, since we're peeling off the back
-                    // Any token after the last versionPart will be in the classifier.
-                    // Any token UP TO first non-versionPart is part of the version.
-                    if ( !tokenIsVersionPart )
-                    {
-                        if ( firstVersionTokenEncountered )
-                        {
-                            //noinspection BreakStatement
-                            break;
-                        }
-                        else
-                        {
-                            bufferToUpdate = classifierBuffer;
-                        }
-                    }
-                    else
-                    {
-                        firstVersionTokenEncountered = true;
-
-                        bufferToUpdate = versionBuffer;
-                    }
-
-                    if ( firstToken )
-                    {
-                        firstToken = false;
-                    }
-                    else
-                    {
-                        bufferToUpdate.insert( 0, '-' );
-                    }
-
-                    bufferToUpdate.insert( 0, token );
-
-                    tokensIterated++;
-                }
-
-                // Now, restore the proper ordering so we can build the artifactId.
-                Collections.reverse( avceTokenList );
-
-                // if we didn't find a version, then punt. Use the last token
-                // as the version, and set the classifier empty.
-                if ( versionBuffer.length() < 1 )
-                {
-                    if ( avceTokenList.size() > 1 )
-                    {
-                        int lastIdx = avceTokenList.size() - 1;
-
-                        versionBuffer.append( avceTokenList.get( lastIdx ) );
-                        avceTokenList.remove( lastIdx );
-                    }
-
-                    classifierBuffer.setLength( 0 );
-                }
-                else
-                {
-                    // if everything is kosher, then pop off all the classifier and
-                    // version tokens, leaving the naked artifact id in the list.
-                    avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
-                }
-
-                StringBuffer artifactIdBuffer = new StringBuffer();
-
-                firstToken = true;
-                for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
-                {
-                    String token = (String) it.next();
-
-                    if ( firstToken )
-                    {
-                        firstToken = false;
-                    }
-                    else
-                    {
-                        artifactIdBuffer.append( '-' );
-                    }
-
-                    artifactIdBuffer.append( token );
-                }
-
-                String artifactId = artifactIdBuffer.toString();
-
-                if ( artifactId.length() > 0 )
-                {
-                    int lastVersionCharIdx = versionBuffer.length() - 1;
-                    if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
-                    {
-                        versionBuffer.setLength( lastVersionCharIdx );
-                    }
-
-                    String version = versionBuffer.toString();
-
-                    if ( version.length() > 0 )
-                    {
-                        if ( classifierBuffer.length() > 0 )
-                        {
-                            result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
-                                                                                   classifierBuffer.toString() );
-                        }
-                        else
-                        {
-                            result = artifactFactory.createArtifact( groupId, artifactId, version,
-                                                                     Artifact.SCOPE_RUNTIME, type );
-                        }
-                    }
-                    else
-                    {
-                        throw new DiscovererException( "Path filename version is empty" );
-                    }
-                }
-                else
-                {
-                    throw new DiscovererException( "Path filename artifactId is empty" );
-                }
-            }
-            else
-            {
-                throw new DiscovererException( "Path artifact type does not corresspond to an artifact type" );
-            }
-        }
-        else
-        {
-            throw new DiscovererException( "Path does not match a legacy repository path for an artifact" );
-        }
-
-        return result;
-    }
-}
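
For reference, the version/classifier heuristic deleted above can be illustrated in isolation. The sketch below is a simplified, standalone approximation rather than Archiva code: the class name, the trimmed-down version-part pattern, and the sample filename stem are assumptions made for illustration. It splits a filename stem on '-', walks the tokens from the back, treats the trailing non-version tokens as the classifier, the following run of version-like tokens as the version, and whatever remains as the artifactId.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/** Simplified illustration (not Archiva code) of the legacy-path version heuristic above. */
public class LegacyPathHeuristicDemo
{
    // Trimmed-down stand-in for the validVersionParts expression in the deleted code.
    private static final String VERSION_PART =
        "(?i)(\\d[._0-9a-z]*)|(snapshot)|(alpha[._0-9]*)|(beta[._0-9]*)|(rc[._0-9]*)";

    public static void main( String[] args )
    {
        // The extension (.jar, .tar.gz, ...) is assumed to be stripped already, as in the code above.
        String stem = "maven-test-plugin-1.0-20050611.112233-1-javadoc";

        List tokens = new ArrayList( Arrays.asList( stem.split( "-" ) ) );
        Collections.reverse( tokens );

        StringBuffer classifier = new StringBuffer();
        StringBuffer version = new StringBuffer();
        boolean versionSeen = false;
        int consumed = 0;

        for ( Iterator it = tokens.iterator(); it.hasNext(); )
        {
            String token = (String) it.next();
            boolean isVersionPart = token.matches( VERSION_PART );

            if ( !isVersionPart && versionSeen )
            {
                break; // everything before this token belongs to the artifactId
            }

            StringBuffer target = isVersionPart ? version : classifier;
            versionSeen = versionSeen || isVersionPart;

            if ( target.length() > 0 )
            {
                target.insert( 0, '-' );
            }
            target.insert( 0, token );
            consumed++;
        }

        Collections.reverse( tokens );
        List artifactIdTokens = tokens.subList( 0, tokens.size() - consumed );

        System.out.println( "artifactId = " + join( artifactIdTokens ) ); // maven-test-plugin
        System.out.println( "version    = " + version );                  // 1.0-20050611.112233-1
        System.out.println( "classifier = " + classifier );               // javadoc
    }

    private static String join( List tokens )
    {
        StringBuffer sb = new StringBuffer();
        for ( Iterator it = tokens.iterator(); it.hasNext(); )
        {
            sb.append( it.next() );
            if ( it.hasNext() )
            {
                sb.append( '-' );
            }
        }
        return sb.toString();
    }
}
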
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java
deleted file mode 100644 (file)
index ff74a92..0000000
+++ /dev/null
@@ -1,57 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-import java.util.List;
-
-/**
- * Interface for discovering metadata files.
- */
-public interface MetadataDiscoverer
-    extends Discoverer
-{
-    String ROLE = MetadataDiscoverer.class.getName();
-
-    /**
-     * Search for metadata files in the repository.
-     *
-     * @param repository          The repository.
-     * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
-     * @param metadataFilter      filter to use on the discovered metadata before returning
-     * @return the list of artifacts found
-     * @throws DiscovererException if there is a problem during the discovery process
-     */
-    List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter metadataFilter )
-        throws DiscovererException;
-
-    /**
-     * Search for metadata files in the repository.
-     *
-     * @param repository          The repository.
-     * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
-     * @return the list of artifacts found
-     * @throws DiscovererException if there is a problem during the discovery process
-     */
-    List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
-        throws DiscovererException;
-}
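
As context for the removal, a caller would presumably have obtained this interface as a Plexus component and invoked it roughly as follows; the role-hint, wrapper class name, and exception handling below are assumptions for illustration, not taken from this commit.

import java.util.List;

import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.codehaus.plexus.PlexusContainer;

/** Illustrative caller of the removed MetadataDiscoverer interface. */
public class MetadataDiscoveryExample
{
    public List discoverAll( PlexusContainer container, ArtifactRepository repository, List blacklistedPatterns )
        throws Exception
    {
        // Look up the discoverer component; the "default" role-hint is an assumption.
        MetadataDiscoverer discoverer =
            (MetadataDiscoverer) container.lookup( MetadataDiscoverer.ROLE, "default" );

        return discoverer.discoverMetadata( repository, blacklistedPatterns );
    }
}
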
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java
new file mode 100644 (file)
index 0000000..9aacbc3
--- /dev/null
@@ -0,0 +1,210 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.SystemUtils;
+import org.apache.maven.archiva.common.consumers.Consumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.util.DirectoryWalkListener;
+import org.codehaus.plexus.util.SelectorUtils;
+
+import java.io.File;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * RepositoryScanner - this is an instance of a scan against a repository.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class RepositoryScanner
+    implements DirectoryWalkListener
+{
+    public static final String ROLE = RepositoryScanner.class.getName();
+
+    private List consumers;
+
+    private ArtifactRepository repository;
+
+    private Logger logger;
+
+    private boolean isCaseSensitive = true;
+
+    private DiscovererStatistics stats;
+
+    private long onlyModifiedAfterTimestamp = 0;
+
+    public RepositoryScanner( ArtifactRepository repository, List consumerList )
+    {
+        this.repository = repository;
+        this.consumers = consumerList;
+        stats = new DiscovererStatistics( repository );
+
+        Iterator it = this.consumers.iterator();
+        while ( it.hasNext() )
+        {
+            Consumer consumer = (Consumer) it.next();
+
+            if ( !consumer.init( this.repository ) )
+            {
+                throw new IllegalStateException( "Consumer [" + consumer.getName()
+                    + "] is reporting that it is incompatible with the [" + repository.getId() + "] repository." );
+            }
+        }
+
+        if ( SystemUtils.IS_OS_WINDOWS )
+        {
+            isCaseSensitive = false;
+        }
+    }
+
+    public DiscovererStatistics getStatistics()
+    {
+        return stats;
+    }
+
+    public void directoryWalkStarting( File basedir )
+    {
+        getLogger().info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getBasedir() );
+        stats.reset();
+        stats.timestampStarted = System.currentTimeMillis();
+    }
+
+    public void directoryWalkStep( int percentage, File file )
+    {
+        getLogger().debug( "Walk Step: " + percentage + ", " + file );
+
+        // The finished timestamp refers to the last successful scan, not the current one.
+        if ( file.lastModified() < onlyModifiedAfterTimestamp )
+        {
+            // Skip file as no change has occurred.
+            getLogger().debug( "Skipping, No Change: " + file.getAbsolutePath() );
+            stats.filesSkipped++;
+            return;
+        }
+
+        synchronized ( consumers )
+        {
+            stats.filesIncluded++;
+
+            BaseFile basefile = new BaseFile( repository.getBasedir(), file );
+
+            Iterator itConsumers = this.consumers.iterator();
+            while ( itConsumers.hasNext() )
+            {
+                Consumer consumer = (Consumer) itConsumers.next();
+
+                if ( wantsFile( consumer, basefile.getRelativePath() ) )
+                {
+                    try
+                    {
+                        getLogger().debug( "Sending to consumer: " + consumer.getName() );
+                        stats.filesConsumed++;
+                        consumer.processFile( basefile );
+                    }
+                    catch ( Exception e )
+                    {
+                        /* Intentionally catch all exceptions
+                         * so that the discoverer processing can continue.
+                         */
+                        getLogger().error(
+                                           "Consumer [" + consumer.getName() + "] had an error when processing file ["
+                                               + basefile.getAbsolutePath() + "]: " + e.getMessage(), e );
+                    }
+                }
+                else
+                {
+                    getLogger().debug(
+                                       "Skipping consumer " + consumer.getName() + " for file "
+                                           + basefile.getRelativePath() );
+                }
+            }
+        }
+    }
+
+    public void directoryWalkFinished()
+    {
+        getLogger().info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getBasedir() );
+        stats.timestampFinished = System.currentTimeMillis();
+    }
+
+    private boolean wantsFile( Consumer consumer, String relativePath )
+    {
+        Iterator it;
+
+        // Test excludes first.
+        it = consumer.getExcludePatterns().iterator();
+        while ( it.hasNext() )
+        {
+            String pattern = (String) it.next();
+            if ( SelectorUtils.matchPath( pattern, relativePath, isCaseSensitive ) )
+            {
+                // Definitely does NOT WANT FILE.
+                return false;
+            }
+        }
+
+        // Now test includes.
+        it = consumer.getIncludePatterns().iterator();
+        while ( it.hasNext() )
+        {
+            String pattern = (String) it.next();
+            if ( SelectorUtils.matchPath( pattern, relativePath, isCaseSensitive ) )
+            {
+                // Specifically WANTS FILE.
+                return true;
+            }
+        }
+
+        // Neither included nor excluded? Default to EXCLUDE.
+        return false;
+    }
+    
+    public long getOnlyModifiedAfterTimestamp()
+    {
+        return onlyModifiedAfterTimestamp;
+    }
+
+    public void setOnlyModifiedAfterTimestamp( long onlyModifiedAfterTimestamp )
+    {
+        this.onlyModifiedAfterTimestamp = onlyModifiedAfterTimestamp;
+    }
+
+    /**
+     * Debug method from DirectoryWalker.
+     */
+    public void debug( String message )
+    {
+        getLogger().debug( "Repository Scanner: " + message );
+    }
+
+    public Logger getLogger()
+    {
+        return logger;
+    }
+
+    public void setLogger( Logger logger )
+    {
+        this.logger = logger;
+    }
+}
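
A rough usage sketch may help place this class. RepositoryScanner is a DirectoryWalkListener, so it is presumably driven by plexus-utils' DirectoryWalker over the repository base directory; the wiring below, including the DirectoryWalker method names, is an assumption for illustration and is not part of this commit. Note also that wantsFile() tests a consumer's exclude patterns before its include patterns and defaults to excluding files matched by neither.

package org.apache.maven.archiva.discoverer;

import java.io.File;
import java.util.List;

import org.apache.maven.artifact.repository.ArtifactRepository;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.DirectoryWalker;

/** Illustrative wiring of a RepositoryScanner into plexus-utils' DirectoryWalker. */
public class RepositoryScanExample
{
    public DiscovererStatistics scan( ArtifactRepository repository, List consumers, Logger logger,
                                      long lastScanTimestamp )
    {
        RepositoryScanner scanner = new RepositoryScanner( repository, consumers );
        scanner.setLogger( logger );

        // Files not modified since the previous scan are counted as skipped.
        scanner.setOnlyModifiedAfterTimestamp( lastScanTimestamp );

        DirectoryWalker walker = new DirectoryWalker();
        walker.setBaseDir( new File( repository.getBasedir() ) );
        walker.addDirectoryWalkListener( scanner );
        walker.scan();

        return scanner.getStatistics();
    }
}
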
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java
deleted file mode 100644 (file)
index 0a85ba1..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-/**
- * Filter that accepts all.
- */
-public class AcceptAllArtifactFilter
-    implements ArtifactFilter
-{
-    public boolean include( Artifact artifact )
-    {
-        return true;
-    }
-}
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java
deleted file mode 100644 (file)
index 80b49a8..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Filter that accepts all.
- */
-public class AcceptAllMetadataFilter
-    implements MetadataFilter
-{
-    public boolean include( RepositoryMetadata metadata, long timestamp )
-    {
-        return true;
-    }
-}
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java
deleted file mode 100644 (file)
index fff5fb5..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Ability to filter repository metadata lists.
- *
- * @todo should be in maven-artifact
- */
-public interface MetadataFilter
-{
-    /**
-     * Whether to include this metadata in the filtered list.
-     *
-     * @param metadata  the metadata
-     * @param timestamp the time to compare against - it will be included if it doesn't exist or is outdated
-     * @return whether to include it
-     */
-    boolean include( RepositoryMetadata metadata, long timestamp );
-}
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java
deleted file mode 100644 (file)
index a3c4577..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-/**
- * A filter to remove snapshot artifacts during discovery.
- */
-public class SnapshotArtifactFilter
-    implements ArtifactFilter
-{
-    public boolean include( Artifact artifact )
-    {
-        return !artifact.isSnapshot();
-    }
-}
diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java
deleted file mode 100644 (file)
index f1609e3..0000000
+++ /dev/null
@@ -1,90 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-
-/**
- * @author Edwin Punzalan
- */
-public abstract class AbstractArtifactDiscovererTest
-    extends PlexusTestCase
-{
-    protected ArtifactDiscoverer discoverer;
-
-    private ArtifactFactory factory;
-
-    protected ArtifactRepository repository;
-
-    protected abstract String getLayout();
-
-    protected abstract File getRepositoryFile();
-
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, getLayout() );
-
-        factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        repository = getRepository();
-    }
-
-    protected ArtifactRepository getRepository()
-        throws Exception
-    {
-        File basedir = getRepositoryFile();
-
-        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
-        ArtifactRepositoryLayout layout =
-            (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, getLayout() );
-
-        return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
-    }
-
-    protected Artifact createArtifact( String groupId, String artifactId, String version )
-    {
-        Artifact artifact = factory.createArtifact( groupId, artifactId, version, null, "jar" );
-        artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
-        artifact.setRepository( repository );
-        return artifact;
-    }
-
-    protected Artifact createArtifact( String groupId, String artifactId, String version, String type )
-    {
-        return factory.createArtifact( groupId, artifactId, version, null, type );
-    }
-
-    protected Artifact createArtifact( String groupId, String artifactId, String version, String type,
-                                       String classifier )
-    {
-        return factory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
-    }
-}
diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java
new file mode 100644 (file)
index 0000000..e718561
--- /dev/null
@@ -0,0 +1,86 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+
+/**
+ * @author Edwin Punzalan
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ */
+public abstract class AbstractDiscovererTestCase
+    extends PlexusTestCase
+{
+    protected Discoverer discoverer;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        discoverer = (Discoverer) lookup( Discoverer.ROLE );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        release( discoverer );
+        super.tearDown();
+    }
+
+    protected ArtifactRepository getLegacyRepository()
+        throws Exception
+    {
+        File repoBaseDir = new File( getBasedir(), "src/test/legacy-repository" );
+        ArtifactRepository repository = createRepository( repoBaseDir, "legacy" );
+        resetRepositoryState( repository );
+        return repository;
+    }
+
+    protected ArtifactRepository getDefaultRepository()
+        throws Exception
+    {
+        File repoBaseDir = new File( getBasedir(), "src/test/repository" );
+        ArtifactRepository repository = createRepository( repoBaseDir, "default" );
+        resetRepositoryState( repository );
+        return repository;
+    }
+
+    protected void resetRepositoryState( ArtifactRepository repository )
+    {
+        // Subclasses may override this to perform any repository cleanup needed between tests.
+    }
+
+    protected ArtifactRepository createRepository( File basedir, String layout )
+        throws Exception
+    {
+        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+        ArtifactRepositoryLayout repoLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, layout );
+
+        return factory.createArtifactRepository( "discoveryRepo-" + getName(), "file://" + basedir, repoLayout, null,
+                                                 null );
+    }
+}
diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AllTests.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AllTests.java
new file mode 100644 (file)
index 0000000..a32460b
--- /dev/null
@@ -0,0 +1,43 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - added to allow IDE users to pull all tests into their tool. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.discoverer" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( DefaultDiscovererTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
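
Outside an IDE, the same suite can also be run with the JUnit 3.x text runner; the wrapper class name below is made up for illustration.

import org.apache.maven.archiva.discoverer.AllTests;

/** Illustrative command-line entry point for the discoverer test suite. */
public class RunDiscovererTests
{
    public static void main( String[] args )
    {
        junit.textui.TestRunner.run( AllTests.suite() );
    }
}
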
diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java
deleted file mode 100644 (file)
index cd20645..0000000
+++ /dev/null
@@ -1,702 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the default artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:DefaultArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class DefaultArtifactDiscovererTest
-    extends AbstractArtifactDiscovererTest
-{
-    private static final List JAVAX_BLACKLIST = Collections.singletonList( "javax/**" );
-
-    protected String getLayout()
-    {
-        return "default";
-    }
-
-    protected File getRepositoryFile()
-    {
-        return getTestFile( "src/test/repository" );
-    }
-
-    public void testDefaultExcludes()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            boolean b = path.indexOf( "CVS" ) >= 0;
-            if ( b )
-            {
-                found = true;
-                assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
-            assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
-        }
-    }
-
-    public void testStandardExcludes()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "KEYS".equals( path ) )
-            {
-                found = true;
-                assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testBlacklistedExclude()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithBlacklist( JAVAX_BLACKLIST );
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "javax/sql/jdbc/2.0/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-    }
-
-    public void testKickoutWithShortPath()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path is too short to build an artifact from",
-                              dPath.getComment() );
-
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithWrongArtifactId()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar".equals(
-                path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path filename does not correspond to an artifact",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not wrong jar",
-                         "wrong-artifactId-1.0-20050611.112233-1.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithNoType()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1/invalid-1".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path filename does not have an extension",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1'", "invalid-1".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithWrongVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1.0/invalid-2.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Built artifact version does not match path version",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-2.0.jar'", "invalid-2.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithLongerVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1.0/invalid-1.0b.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path version does not corresspond to an artifact version",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1.0b.jar'", "invalid-1.0b.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithWrongSnapshotVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout",
-                              "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1.0.jar'", "invalid-1.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithSnapshotBaseVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar".equals(
-                path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout",
-                              "Built snapshot artifact base version does not match path version: invalid:invalid:jar:1.0-SNAPSHOT:runtime; should have been version: 1.0-20050611.123456-1",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1.0-20050611-123456-1.jar'",
-                         "invalid-1.0-20050611.123456-1.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
-    }
-
-    public void testArtifactWithClassifier()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
-    }
-
-    public void testJavaSourcesInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains(
-            createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
-    }
-
-    public void testTestSourcesInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains(
-            createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources" ) ) );
-    }
-
-    public void testDistributionInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check zip included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
-        assertTrue( "Check tar.gz included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
-    }
-
-    public void testSnapshotInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-        assertTrue( "Check snapshot included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ) ) );
-    }
-
-    public void testSnapshotInclusionWithClassifier()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check snapshot included", artifacts.contains(
-            createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ) ) );
-    }
-
-    public void testSnapshotExclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-        assertFalse( "Check snapshot included",
-                     artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) );
-    }
-
-    public void testFileSet()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-            assertNotNull( "Check file is set", artifact.getFile() );
-        }
-    }
-
-    public void testRepositorySet()
-        throws MalformedURLException, DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        String url = repository.getUrl();
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-            assertNotNull( "Check repository set", artifact.getRepository() );
-            assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
-        }
-    }
-
-    public void testStandalonePoms()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-
-        // cull down to actual artifacts (only standalone poms will have type = pom)
-        Map keyedArtifacts = new HashMap();
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            String key = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
-            if ( !"pom".equals( a.getType() ) || !keyedArtifacts.containsKey( key ) )
-            {
-                keyedArtifacts.put( key, a );
-            }
-        }
-
-        List models = new ArrayList();
-
-        for ( Iterator i = keyedArtifacts.values().iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-
-            if ( "pom".equals( a.getType() ) )
-            {
-                models.add( a );
-            }
-        }
-
-        assertEquals( 4, models.size() );
-
-        // Define order we expect
-        Collections.sort( models );
-
-        Iterator itr = models.iterator();
-        Artifact model = (Artifact) itr.next();
-        assertEquals( "org.apache.maven", model.getGroupId() );
-        assertEquals( "B", model.getArtifactId() );
-        assertEquals( "1.0", model.getVersion() );
-        model = (Artifact) itr.next();
-        assertEquals( "org.apache.maven", model.getGroupId() );
-        assertEquals( "B", model.getArtifactId() );
-        assertEquals( "2.0", model.getVersion() );
-        model = (Artifact) itr.next();
-        assertEquals( "org.apache.maven", model.getGroupId() );
-        assertEquals( "discovery", model.getArtifactId() );
-        assertEquals( "1.0", model.getVersion() );
-        model = (Artifact) itr.next();
-        assertEquals( "org.apache.testgroup", model.getGroupId() );
-        assertEquals( "discovery", model.getArtifactId() );
-        assertEquals( "1.0", model.getVersion() );
-    }
-
-    public void testShortPath()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid-1.0.jar" );
-
-            fail( "Artifact should be null for short paths" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testWrongArtifactId()
-        throws ComponentLookupException
-    {
-
-        try
-        {
-            discoverer.buildArtifact( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
-
-            fail( "Artifact should be null for wrong ArtifactId" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testNoType()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
-            fail( "Artifact should be null for no type" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testWrongVersion()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1.0/invalid-2.0.jar" );
-
-            fail( "Artifact should be null for wrong version" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testLongVersion()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1.0/invalid-1.0b.jar" );
-
-            fail( "Artifact should be null for long version" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testWrongSnapshotVersion()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
-
-            fail( "Artifact should be null for wrong snapshot version" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testSnapshotBaseVersion()
-        throws ComponentLookupException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
-
-            fail( "Artifact should be null for snapshot base version" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testPathWithClassifier()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ), artifact );
-    }
-
-    public void testWithJavaSourceInclusion()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/testing/1.0/testing-1.0-sources.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ), artifact );
-    }
-
-    public void testDistributionArtifacts()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/testing/1.0/testing-1.0.tar.gz";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ), artifact );
-
-        testPath = "org/apache/maven/testing/1.0/testing-1.0.zip";
-
-        artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ), artifact );
-    }
-
-    public void testSnapshot()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ), artifact );
-
-        testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar";
-
-        artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ), artifact );
-    }
-
-    public void testNormal()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "javax/sql/jdbc/2.0/jdbc-2.0.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
-    }
-
-    public void testSnapshotWithClassifier()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ),
-                      artifact );
-    }
-
-    private List discoverArtifactsWithSnapshots()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() );
-    }
-
-    private List discoverArtifactsWithBlacklist( List list )
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, list, new SnapshotArtifactFilter() );
-    }
-
-    private List discoverArtifacts()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() );
-    }
-}
diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultDiscovererTest.java
new file mode 100644 (file)
index 0000000..f105fc6
--- /dev/null
@@ -0,0 +1,169 @@
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.logging.console.ConsoleLogger;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * DefaultDiscovererTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DefaultDiscovererTest
+    extends AbstractDiscovererTestCase
+{
+    private MockConsumer createAndAddMockConsumer( List consumers, String includePattern, String excludePattern )
+    {
+        MockConsumer mockConsumer = new MockConsumer();
+        mockConsumer.getIncludePatterns().add( includePattern );
+        if ( StringUtils.isNotBlank( excludePattern ) )
+        {
+            mockConsumer.getExcludePatterns().add( excludePattern );
+        }
+        consumers.add( mockConsumer );
+        return mockConsumer;
+    }
+    
+    private void assertFilesProcessed( int expectedFileCount, DiscovererStatistics stats, MockConsumer mockConsumer )
+    {
+        assertNotNull( "Stats should not be null.", stats );
+        assertNotNull( "MockConsumer should not be null.", mockConsumer );
+        assertNotNull( "MockConsumer.filesProcessed should not be null.", mockConsumer.getFilesProcessed() );
+
+        if ( stats.getFilesConsumed() != mockConsumer.getFilesProcessed().size() )
+        {
+            fail( "Somehow, the stats count of files consumed, and the count of actual files "
+                + "processed by the consumer do not match." );
+        }
+
+        int actualFileCount = mockConsumer.getFilesProcessed().size();
+
+        if ( expectedFileCount != actualFileCount )
+        {
+            stats.dump( new ConsoleLogger( Logger.LEVEL_DEBUG, "test" ) );
+            System.out.println( "Base Dir:" + stats.getRepository().getBasedir() );
+            int num = 0;
+            Iterator it = mockConsumer.getFilesProcessed().iterator();
+            while ( it.hasNext() )
+            {
+                BaseFile file = (BaseFile) it.next();
+                System.out.println( "  Processed File [" + num + "]: " + file.getRelativePath() );
+                num++;
+            }
+
+            fail( "Files Processed mismatch: expected:<" + expectedFileCount + ">, actual:<" + actualFileCount + ">" );
+        }
+    }
+
+    public void testLegacyLayoutRepositoryAll()
+        throws Exception
+    {
+        ArtifactRepository repository = getLegacyRepository();
+        List consumers = new ArrayList();
+        MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*", null );
+
+        DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true );
+
+        assertNotNull( stats );
+
+        assertFilesProcessed( 16, stats, mockConsumer );
+    }
+
+    public void testDefaultLayoutRepositoryAll()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+        MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*", null );
+
+        DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true );
+
+        assertNotNull( stats );
+        
+        assertFilesProcessed( 42, stats, mockConsumer );
+    }
+
+    public void testDefaultLayoutRepositoryPomsOnly()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+        MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.pom", null );
+
+        DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true );
+
+        assertNotNull( stats );
+
+        assertFilesProcessed( 10, stats, mockConsumer );
+    }
+
+    public void testDefaultLayoutRepositoryJarsOnly()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+        MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null );
+
+        DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true );
+
+        assertNotNull( stats );
+
+        assertFilesProcessed( 17, stats, mockConsumer );
+    }
+
+    public void testDefaultLayoutRepositoryJarsNoSnapshots()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+        MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null );
+
+        DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, false );
+
+        assertNotNull( stats );
+
+        assertFilesProcessed( 13, stats, mockConsumer );
+    }
+
+    public void testDefaultLayoutRepositoryJarsNoSnapshotsWithExclusions()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        List consumers = new ArrayList();
+        MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null );
+
+        List exclusions = new ArrayList();
+        exclusions.add( "**/*-client.jar" );
+        DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, false, 0, exclusions, null );
+
+        assertNotNull( stats );
+
+        assertFilesProcessed( 12, stats, mockConsumer );
+    }
+}
diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java
deleted file mode 100644 (file)
index 50ac62b..0000000
+++ /dev/null
@@ -1,199 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * This class tests the DefaultMetadataDiscoverer class.
- */
-public class DefaultMetadataDiscovererTest
-    extends PlexusTestCase
-{
-    private MetadataDiscoverer discoverer;
-
-    private static final String TEST_OPERATION = "test";
-
-    private ArtifactRepository repository;
-
-    private ArtifactFactory factory;
-
-    /**
-     *
-     */
-    public void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        discoverer = (MetadataDiscoverer) lookup( MetadataDiscoverer.ROLE, "default" );
-
-        factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        repository = getRepository();
-
-        removeTimestampMetadata();
-    }
-
-    protected ArtifactRepository getRepository()
-        throws Exception
-    {
-        File basedir = getTestFile( "src/test/repository" );
-
-        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
-        ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
-        return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
-    }
-
-    /**
-     *
-     */
-    public void tearDown()
-        throws Exception
-    {
-        super.tearDown();
-        discoverer = null;
-    }
-
-    /**
-     * Test if metadata file in wrong directory was added to the kickedOutPaths.
-     */
-    public void testKickoutWrongDirectory()
-        throws DiscovererException
-    {
-        discoverer.discoverMetadata( repository, null );
-        Iterator iter = discoverer.getKickedOutPathsIterator();
-        boolean found = false;
-        while ( iter.hasNext() && !found )
-        {
-            DiscovererPath dPath = (DiscovererPath) iter.next();
-            String dir = dPath.getPath();
-
-            String normalizedDir = dir.replace( '\\', '/' );
-            if ( "javax/maven-metadata.xml".equals( normalizedDir ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Unable to build a repository metadata from path",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( found );
-    }
-
-    /**
-     * Test if blank metadata file was added to the kickedOutPaths.
-     */
-    public void testKickoutBlankMetadata()
-        throws DiscovererException
-    {
-        discoverer.discoverMetadata( repository, null );
-        Iterator iter = discoverer.getKickedOutPathsIterator();
-        boolean found = false;
-        while ( iter.hasNext() && !found )
-        {
-            DiscovererPath dPath = (DiscovererPath) iter.next();
-            String dir = dPath.getPath();
-
-            String normalizedDir = dir.replace( '\\', '/' );
-            if ( "org/apache/maven/some-ejb/1.0/maven-metadata.xml".equals( normalizedDir ) )
-            {
-                found = true;
-                assertTrue( "Check reason for kickout", dPath.getComment().matches(
-                    "Error reading metadata file '(.*)': input contained no data" ) );
-            }
-        }
-        assertTrue( found );
-    }
-
-    private void removeTimestampMetadata()
-        throws IOException
-    {
-        // remove the metadata that tracks time
-        File file = new File( repository.getBasedir(), "maven-metadata.xml" );
-        System.gc(); // for Windows
-        file.delete();
-        assertFalse( file.exists() );
-    }
-
-    public void testDiscoverMetadata()
-        throws DiscovererException
-    {
-        List metadataPaths = discoverer.discoverMetadata( repository, null );
-        assertNotNull( "Check metadata not null", metadataPaths );
-
-        RepositoryMetadata metadata =
-            new ArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery" ) );
-        assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
-        metadata =
-            new SnapshotArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery", "1.0" ) );
-        assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
-        metadata = new GroupRepositoryMetadata( "org.apache.maven" );
-        assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-    }
-
-    protected Artifact createArtifact( String groupId, String artifactId )
-    {
-        return createArtifact( groupId, artifactId, "1.0" );
-    }
-
-    private Artifact createArtifact( String groupId, String artifactId, String version )
-    {
-        return factory.createArtifact( groupId, artifactId, version, null, "jar" );
-    }
-
-    private boolean containsMetadata( List metadataPaths, RepositoryMetadata metadata )
-    {
-        for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
-        {
-            RepositoryMetadata m = (RepositoryMetadata) i.next();
-
-            if ( m.getGroupId().equals( metadata.getGroupId() ) )
-            {
-                if ( m.getArtifactId() == null && metadata.getArtifactId() == null )
-                {
-                    return true;
-                }
-                else if ( m.getArtifactId() != null && m.getArtifactId().equals( metadata.getArtifactId() ) )
-                {
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-}
diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java
deleted file mode 100644 (file)
index d0b2454..0000000
+++ /dev/null
@@ -1,537 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Test the legacy artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:LegacyArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class LegacyArtifactDiscovererTest
-    extends AbstractArtifactDiscovererTest
-{
-    private static final List JAVAX_SQL_BLACKLIST = Collections.singletonList( "javax.sql/**" );
-
-    protected String getLayout()
-    {
-        return "legacy";
-    }
-
-    protected File getRepositoryFile()
-    {
-        return getTestFile( "src/test/legacy-repository" );
-    }
-
-    public void testDefaultExcludes()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( path.indexOf( "CVS" ) >= 0 )
-            {
-                found = true;
-                assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
-            assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
-        }
-    }
-
-    public void testStandardExcludes()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "KEYS".equals( path ) )
-            {
-                found = true;
-                assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testBlacklistedExclude()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithBlacklist();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "javax.sql/jars/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check exclusion was found", found );
-
-        assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-    }
-
-    public void testKickoutWithShortPath()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout",
-                              "Path does not match a legacy repository path for an artifact", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithLongPath()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/jars/1.0/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout",
-                              "Path does not match a legacy repository path for an artifact", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithInvalidType()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/foo/invalid-1.0.foo".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path artifact type does not corresspond to an artifact type",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not invalid-1.0.foo", "invalid-1.0.foo".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithNoExtension()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/jars/no-extension".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path filename does not have an extension",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'no-extension'", "no-extension".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithWrongExtension()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/jars/invalid-1.0.rar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path type does not match the extension",
-                              dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid-1.0.rar'", "invalid-1.0.rar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testKickoutWithNoVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-        boolean found = false;
-        for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
-        {
-            DiscovererPath dPath = (DiscovererPath) i.next();
-
-            String path = dPath.getPath();
-
-            if ( "invalid/jars/invalid.jar".equals( path.replace( '\\', '/' ) ) )
-            {
-                found = true;
-                assertEquals( "Check reason for kickout", "Path filename version is empty", dPath.getComment() );
-            }
-        }
-        assertTrue( "Check kickout was found", found );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact a = (Artifact) i.next();
-            assertFalse( "Check not 'invalid.jar'", "invalid.jar".equals( a.getFile().getName() ) );
-        }
-    }
-
-    public void testInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
-    }
-
-    public void testTextualVersion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "UNKNOWN" ) ) );
-    }
-
-    public void testArtifactWithClassifier()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
-    }
-
-    public void testJavaSourcesInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains(
-            createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
-    }
-
-    public void testDistributionInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check zip included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
-        assertTrue( "Check tar.gz included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
-    }
-
-    public void testSnapshotInclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-        assertTrue( "Check snapshot included",
-                    artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
-    }
-
-    public void testSnapshotExclusion()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifacts();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
-        assertFalse( "Check snapshot included",
-                     artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
-    }
-
-    public void testFileSet()
-        throws DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-            assertNotNull( "Check file is set", artifact.getFile() );
-        }
-    }
-
-    public void testRepositorySet()
-        throws MalformedURLException, DiscovererException
-    {
-        List artifacts = discoverArtifactsWithSnapshots();
-        assertNotNull( "Check artifacts not null", artifacts );
-
-        String url = repository.getUrl();
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-            assertNotNull( "Check repository set", artifact.getRepository() );
-            assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
-        }
-    }
-
-    public void testWrongArtifactPackaging()
-        throws ComponentLookupException, DiscovererException
-    {
-        try
-        {
-            discoverer.buildArtifact( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
-
-            fail( "Artifact should be null for wrong package extension" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testNoArtifactId()
-        throws DiscovererException
-    {
-        try
-        {
-            discoverer.buildArtifact( "groupId/jars/-1.0.jar" );
-
-            fail( "Artifact should be null when artifactId is missing" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-
-        try
-        {
-            discoverer.buildArtifact( "groupId/jars/1.0.jar" );
-
-            fail( "Artifact should be null when artifactId is missing" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testNoType()
-        throws ComponentLookupException, DiscovererException
-    {
-        try
-        {
-            discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
-            fail( "Artifact should be null for no type" );
-        }
-        catch ( DiscovererException e )
-        {
-            // excellent
-        }
-    }
-
-    public void testSnapshot()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT" ), artifact );
-    }
-
-    public void testFinal()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606" ), artifact );
-    }
-
-    public void testNormal()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "javax.sql/jars/jdbc-2.0.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
-    }
-
-    public void testJavadoc()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc" ), artifact );
-    }
-
-    public void testSources()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "javax.sql/java-sources/jdbc-2.0-sources.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources" ), artifact );
-    }
-
-    public void testPlugin()
-        throws ComponentLookupException, DiscovererException
-    {
-        String testPath = "maven/plugins/maven-test-plugin-1.8.jar";
-
-        Artifact artifact = discoverer.buildArtifact( testPath );
-
-        assertEquals( createArtifact( "maven", "maven-test-plugin", "1.8", "plugin" ), artifact );
-    }
-
-
-    private List discoverArtifacts()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() );
-    }
-
-    private List discoverArtifactsWithBlacklist()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, JAVAX_SQL_BLACKLIST, new SnapshotArtifactFilter() );
-    }
-
-    private List discoverArtifactsWithSnapshots()
-        throws DiscovererException
-    {
-        return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() );
-    }
-}
diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/MockConsumer.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/MockConsumer.java
new file mode 100644 (file)
index 0000000..7f081c9
--- /dev/null
@@ -0,0 +1,75 @@
+/**
+ * 
+ */
+package org.apache.maven.archiva.discoverer;
+
+import org.apache.maven.archiva.common.consumers.Consumer;
+import org.apache.maven.archiva.common.consumers.ConsumerException;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class MockConsumer
+    implements Consumer
+{
+    private List excludePatterns = new ArrayList();
+
+    private List includePatterns = new ArrayList();
+
+    private List filesProcessed = new ArrayList();
+
+    private int countFileProblems = 0;
+
+    public String getName()
+    {
+        return "MockConsumer (Testing Only)";
+    }
+
+    public boolean init( ArtifactRepository repository )
+    {
+        return true;
+    }
+
+    public void processFile( BaseFile file )
+        throws ConsumerException
+    {
+        filesProcessed.add( file );
+    }
+
+    public void processFileProblem( BaseFile file, String message )
+    {
+        countFileProblems++;
+    }
+
+    public List getExcludePatterns()
+    {
+        return excludePatterns;
+    }
+
+    public void setExcludePatterns( List excludePatterns )
+    {
+        this.excludePatterns = excludePatterns;
+    }
+
+    public List getIncludePatterns()
+    {
+        return includePatterns;
+    }
+
+    public void setIncludePatterns( List includePatterns )
+    {
+        this.includePatterns = includePatterns;
+    }
+
+    public int getCountFileProblems()
+    {
+        return countFileProblems;
+    }
+
+    public List getFilesProcessed()
+    {
+        return filesProcessed;
+    }
+}
\ No newline at end of file
index caf5b66978b523992a7f1ac328cc2ca76e49ea5e..b3baf545d4fdfac1935e8c115cc33ef049f027bd 100644 (file)
@@ -18,6 +18,7 @@
   ~ under the License.
   -->
 
+<!-- This metadata is intentionally wrong. -->
 <metadata>
   <groupId>javax.sql</groupId>
   <artifactId>jdbc</artifactId>
index 4f004c5a1de8a70f0abd6e843d0231cc722159f8..10d2c366eedc3fd3ddb461e495e5ced67c69550e 100644 (file)
@@ -27,7 +27,7 @@
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <artifactId>archiva-indexer</artifactId>
-  <name>Archiva Repository Indexer</name>
+  <name>Archiva Indexer</name>
   <dependencies>
     <dependency>
       <groupId>org.apache.maven</groupId>
index 9535a6c70a9c7d7444fff59323c01e1bc7139c84..b4bdbd4f7c62fbc3852d30109a7e1ef71f61e5eb 100644 (file)
@@ -21,6 +21,7 @@ package org.apache.maven.archiva.indexer;
 
 import org.apache.maven.archiva.indexer.query.Query;
 import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
 
 import java.util.Collection;
 import java.util.List;
@@ -89,6 +90,17 @@ public interface RepositoryArtifactIndex
     Collection getAllRecordKeys()
         throws RepositoryIndexException;
 
+    /**
+     * Indexes the specified artifact. If the artifact is already in the repository, it is updated.
+     * This method should use less memory than indexRecords as the records can be created and disposed of on the fly.
+     *
+     * @param artifact  the artifact to index
+     * @param factory   the factory used to convert the artifact into an index record
+     * @throws RepositoryIndexException if there is a problem indexing the artifact
+     */
+    void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+        throws RepositoryIndexException;
+    
     /**
      * Indexes the artifacts found within the specified list. If the artifacts are already in the
      * repository they are updated. This method should use less memory than indexRecords as the records can be
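For reference, a minimal caller-side sketch of the new single-artifact indexing method declared above; this is an illustration only, not part of this commit, and it assumes the index and record factory have already been obtained (for example via Plexus lookups, as the existing indexer tests do):

    import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
    import org.apache.maven.archiva.indexer.RepositoryIndexException;
    import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
    import org.apache.maven.artifact.Artifact;

    public class SingleArtifactIndexer
    {
        // Hypothetical helper class; both collaborators would normally be
        // injected by Plexus through @plexus.requirement fields.
        public void indexOne( RepositoryArtifactIndex index, Artifact artifact,
                              RepositoryIndexRecordFactory recordFactory )
            throws RepositoryIndexException
        {
            // One record is created and written per call, so memory use stays
            // bounded compared to building the full list needed by indexRecords.
            index.indexArtifact( artifact, recordFactory );
        }
    }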
index 5b5f68beacf69ffe5e6c930502bc2b6a64e55054..c0a02935dd57a297e5b4279ee4e0bf8b42e6d0b1 100644 (file)
@@ -351,6 +351,40 @@ public class LuceneRepositoryArtifactIndex
             lastUpdatedTime = System.currentTimeMillis();
         }
     }
+    
+    public void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+        throws RepositoryIndexException
+    {
+        IndexModifier indexModifier = null;
+        try
+        {
+            indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
+
+            RepositoryIndexRecord record = factory.createRecord( artifact );
+
+            if ( record != null )
+            {
+                Term term = new Term( FLD_PK, record.getPrimaryKey() );
+
+                indexModifier.deleteDocuments( term );
+
+                Document document = converter.convert( record );
+                document.add( new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+
+                indexModifier.addDocument( document );
+            }
+            indexModifier.optimize();
+        }
+        catch ( IOException e )
+        {
+            throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
+        }
+        finally
+        {
+            closeQuietly( indexModifier );
+            lastUpdatedTime = System.currentTimeMillis();
+        }
+    }    
 
     public List getAllGroupIds()
         throws RepositoryIndexException
index 8ab966ddeb9f5190728668fb126cb5f84dd8ca67..23488ee0a526ea450d4deddf331f76410ecd965b 100644 (file)
   <dependencies>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-discoverer</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.maven</groupId>
-      <artifactId>maven-artifact</artifactId>
+      <artifactId>archiva-common</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.maven.wagon</groupId>
index f4a77f0c74aab22f5bfd0fee314c9c3b001184d8..a22fdb872c9b479a0dbb698c12d4da9eeaad9f0e 100644 (file)
@@ -21,8 +21,8 @@ package org.apache.maven.archiva.proxy;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.common.artifact.builder.BuilderException;
+import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -85,17 +85,17 @@ public class DefaultProxyRequestHandler
      * @plexus.requirement
      */
     private ArtifactFactory factory;
-
+    
     /**
      * @plexus.requirement role-hint="default"
-     * @todo use a map, and have priorities in them
+     * @todo use a map, and have priorities in them.
      */
-    private ArtifactDiscoverer defaultArtifactDiscoverer;
-
+    private LayoutArtifactBuilder defaultArtifactBuilder;
+    
     /**
      * @plexus.requirement role-hint="legacy"
      */
-    private ArtifactDiscoverer legacyArtifactDiscoverer;
+    private LayoutArtifactBuilder legacyArtifactBuilder;
 
     /**
      * @plexus.requirement role="org.apache.maven.wagon.Wagon"
@@ -159,10 +159,10 @@ public class DefaultProxyRequestHandler
             Artifact artifact = null;
             try
             {
-                artifact = defaultArtifactDiscoverer.buildArtifact( artifactPath );
+                artifact = defaultArtifactBuilder.build( artifactPath );
                 getLogger().debug( "Artifact requested is: " + artifact );
             }
-            catch ( DiscovererException e )
+            catch ( BuilderException e )
             {
                 msg = "Failed to build artifact from path:\n\tfrom default: " + e.getMessage();
             }
@@ -171,10 +171,10 @@ public class DefaultProxyRequestHandler
             {
                 try
                 {
-                    artifact = legacyArtifactDiscoverer.buildArtifact( artifactPath );
+                    artifact = legacyArtifactBuilder.build( artifactPath );
                     getLogger().debug( "Artifact requested is: " + artifact );
                 }
-                catch ( DiscovererException e )
+                catch ( BuilderException e )
                 {
                     getLogger().debug( msg + "\n\tfrom legacy: " + e.getMessage() );
                 }
index 37c1a1e5397b1707fa4186c52ae723da31ed08da..3e8ac02010862cc1cef2b9cdd29e8da4f4fa3fea 100644 (file)
@@ -28,7 +28,6 @@
       <role>org.codehaus.plexus.logging.LoggerManager</role>
       <implementation>org.codehaus.plexus.logging.console.ConsoleLoggerManager</implementation>
       <lifecycle-handler>basic</lifecycle-handler>
-
       <configuration>
         <threshold>ERROR</threshold>
       </configuration>
index 2f0dd064427939837ff8cbda81df5ea930f96347..24eb199548a9db8ed4f35740ce114396cced26a5 100755 (executable)
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.codehaus.plexus</groupId>
+      <artifactId>plexus-jdo2</artifactId>
+      <version>1.0-alpha-8</version>
+      <exclusions>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xercesImpl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>xerces</groupId>
+          <artifactId>xmlParserAPIs</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>jpox</groupId>
+      <artifactId>jpox</artifactId>
+      <version>1.1.6</version>
+      <scope>compile</scope>
+      <exclusions>
+        <!-- targeting JDK 1.4 we don't need this -->
+        <exclusion>
+          <groupId>javax.sql</groupId>
+          <artifactId>jdbc-stdext</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <!--  TEST DEPS -->
+    <dependency>
+      <groupId>hsqldb</groupId>
+      <artifactId>hsqldb</artifactId>
+      <version>1.7.3.3</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <build>
     <plugins>
         <groupId>org.codehaus.modello</groupId>
         <artifactId>modello-maven-plugin</artifactId>
         <version>1.0-alpha-14-SNAPSHOT</version>
+        <configuration>
+          <version>1.0.0</version>
+          <packageWithVersion>false</packageWithVersion>
+          <model>src/main/mdo/reporting.mdo</model>
+        </configuration>
         <executions>
           <execution>
+            <id>modello-java</id>
             <goals>
-              <goal>xpp3-writer</goal>
               <goal>java</goal>
+              <goal>jpox-metadata-class</goal>
+              <!--
+              <goal>xpp3-writer</goal>
               <goal>xpp3-reader</goal>
+               -->
+            </goals>
+          </execution>
+          <execution>
+            <id>jpox-jdo-mapping</id>
+            <goals>
+              <goal>jpox-jdo-mapping</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${basedir}/target/classes/org/apache/maven/archiva/reporting/model/</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+       <groupId>org.codehaus.mojo</groupId>
+        <artifactId>jpox-maven-plugin</artifactId>
+        <version>1.1.6-SNAPSHOT</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>enhance</goal>
             </goals>
           </execution>
         </executions>
-        <configuration>
-          <version>1.0.0</version>
-          <model>src/main/mdo/reporting.mdo</model>
-        </configuration>
       </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java
new file mode 100644 (file)
index 0000000..2854bef
--- /dev/null
@@ -0,0 +1,50 @@
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ReportingException 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ReportingException
+    extends Exception
+{
+
+    public ReportingException()
+    {
+    }
+
+    public ReportingException( String message )
+    {
+        super( message );
+    }
+
+    public ReportingException( Throwable cause )
+    {
+        super( cause );
+    }
+
+    public ReportingException( String message, Throwable cause )
+    {
+        super( message, cause );
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java
new file mode 100644 (file)
index 0000000..f474ef6
--- /dev/null
@@ -0,0 +1,237 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
+
+import java.util.List;
+
+import javax.jdo.Extent;
+import javax.jdo.JDOException;
+import javax.jdo.JDOHelper;
+import javax.jdo.JDOObjectNotFoundException;
+import javax.jdo.JDOUserException;
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
+import javax.jdo.Query;
+import javax.jdo.Transaction;
+
+/**
+ * AbstractJdoDatabase - Base class for all JDO-backed databases.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractJdoDatabase
+    implements Initializable
+{
+    /**
+     * @plexus.requirement role-hint="archiva"
+     */
+    private JdoFactory jdoFactory;
+
+    private PersistenceManagerFactory pmf;
+
+    // -------------------------------------------------------------------
+    // JPOX / JDO Specifics.
+    // -------------------------------------------------------------------
+
+    protected List getAllObjects( Class clazz, String ordering )
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            Extent extent = pm.getExtent( clazz, true );
+
+            Query query = pm.newQuery( extent );
+
+            if ( ordering != null )
+            {
+                query.setOrdering( ordering );
+            }
+
+//            for ( Iterator i = fetchGroups.iterator(); i.hasNext(); )
+//            {
+//                pm.getFetchPlan().addGroup( (String) i.next() );
+//            }
+
+            List result = (List) query.execute();
+
+            result = (List) pm.detachCopyAll( result );
+
+            tx.commit();
+
+            return result;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    protected Object getObjectByKey( Class clazz, Object key )
+        throws JDOObjectNotFoundException, JDOException
+    {
+        if ( key == null )
+        {
+            throw new JDOException( "Unable to get object from jdo using null key." );
+        }
+
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            //            if ( fetchGroup != null )
+            //            {
+            //                pm.getFetchPlan().addGroup( fetchGroup );
+            //            }
+
+            Object objectId = pm.newObjectIdInstance( clazz, key.toString() );
+
+            Object object = pm.getObjectById( objectId );
+
+            object = pm.detachCopy( object );
+
+            tx.commit();
+
+            return object;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    public void initialize()
+        throws InitializationException
+    {
+        pmf = jdoFactory.getPersistenceManagerFactory();
+    }
+
+    protected void removeObject( Object o )
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            o = pm.getObjectById( pm.getObjectId( o ) );
+
+            pm.deletePersistent( o );
+
+            tx.commit();
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    protected Object saveObject( Object object )
+    {
+        return saveObject( object, null );
+    }
+
+    protected Object saveObject( Object object, String fetchGroups[] )
+        throws JDOException
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            if ( ( JDOHelper.getObjectId( object ) != null ) && !JDOHelper.isDetached( object ) )
+            {
+                throw new JDOException( "Existing object is not detached: " + object );
+            }
+
+            if ( fetchGroups != null )
+            {
+                for ( int i = 0; i < fetchGroups.length; i++ )
+                {
+                    pm.getFetchPlan().addGroup( fetchGroups[i] );
+                }
+            }
+
+            pm.makePersistent( object );
+
+            object = pm.detachCopy( object );
+
+            tx.commit();
+
+            return object;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    protected PersistenceManager getPersistenceManager()
+    {
+        PersistenceManager pm = pmf.getPersistenceManager();
+
+        pm.getFetchPlan().setMaxFetchDepth( -1 );
+
+        return pm;
+    }
+
+    protected static void closePersistenceManager( PersistenceManager pm )
+    {
+        try
+        {
+            pm.close();
+        }
+        catch ( JDOUserException e )
+        {
+            // ignore
+        }
+    }
+
+    protected static void rollbackIfActive( Transaction tx )
+    {
+        PersistenceManager pm = tx.getPersistenceManager();
+
+        try
+        {
+            if ( tx.isActive() )
+            {
+                tx.rollback();
+            }
+        }
+        finally
+        {
+            closePersistenceManager( pm );
+        }
+    }
+}
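As a rough illustration of how a component might build on this base class, here is a hypothetical subclass; it is not part of this commit, and the HealthCheckRecord model class is invented for the example (in practice it would be a JPOX-enhanced Modello class):

    package org.apache.maven.archiva.reporting.database;

    import java.util.List;

    public class HealthCheckDatabase
        extends AbstractJdoDatabase
    {
        public HealthCheckRecord save( HealthCheckRecord record )
        {
            // saveObject(..) persists inside a transaction and returns a detached
            // copy, so callers keep working with plain objects outside the PM.
            return (HealthCheckRecord) saveObject( record );
        }

        public List getAll()
        {
            // The ordering string uses JDOQL setOrdering(..) syntax.
            return getAllObjects( HealthCheckRecord.class, "timestamp ascending" );
        }

        public HealthCheckRecord getByKey( String key )
        {
            return (HealthCheckRecord) getObjectByKey( HealthCheckRecord.class, key );
        }
    }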
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java
new file mode 100644 (file)
index 0000000..0eee93b
--- /dev/null
@@ -0,0 +1,80 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.Result;
+
+/**
+ * AbstractResultsDatabase 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractResultsDatabase
+    extends AbstractJdoDatabase
+{
+    /**
+     * <p>
+     * Get the number of failures in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of failures in the database.
+     */
+    public abstract int getNumFailures();
+
+    /**
+     * <p>
+     * Get the number of warnings in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of warnings in the database.
+     */
+    public abstract int getNumWarnings();
+    
+    /**
+     * <p>
+     * Get the number of notices in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of notices in the database.
+     */
+    public abstract int getNumNotices();
+    
+    protected static Result createResult( String processor, String problem, String reason )
+    {
+        Result result = new Result();
+        result.setProcessor( processor );
+        result.setProblem( problem );
+        result.setReason( reason );
+        return result;
+    }
+}
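A hedged sketch of how a concrete subclass such as the ArtifactResultsDatabase below might satisfy the abstract counters; it simply walks every stored ArtifactResults object (java.util.Iterator and the ArtifactResults model are already imported there), which is exactly why the javadoc above warns that these calls are resource intensive. The actual implementation is not shown in this hunk and may differ:

    public int getNumFailures()
    {
        int count = 0;
        for ( Iterator it = getAllArtifactResults().iterator(); it.hasNext(); )
        {
            ArtifactResults results = (ArtifactResults) it.next();
            count += results.getFailures().size();
        }
        return count;
    }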
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java
new file mode 100644 (file)
index 0000000..4314372
--- /dev/null
@@ -0,0 +1,282 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.ArtifactResultsKey;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.jdo.JDOObjectNotFoundException;
+import javax.jdo.PersistenceManager;
+import javax.jdo.Query;
+import javax.jdo.Transaction;
+
+/**
+ * ArtifactResultsDatabase - Database of ArtifactResults. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase"
+ */
+public class ArtifactResultsDatabase
+    extends AbstractResultsDatabase
+{
+    // -------------------------------------------------------------------
+    // ArtifactResults methods.
+    // -------------------------------------------------------------------
+
+    public static final String ROLE = ArtifactResultsDatabase.class.getName();
+
+    public void addFailure( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getFailures().contains( result ) )
+        {
+            results.addFailure( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void addNotice( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getNotices().contains( result ) )
+        {
+            results.addNotice( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void addWarning( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getWarnings().contains( result ) )
+        {
+            results.addWarning( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void clearResults( ArtifactResults results )
+    {
+        results.getFailures().clear();
+        results.getWarnings().clear();
+        results.getNotices().clear();
+
+        saveObject( results );
+    }
+
+    public List getAllArtifactResults()
+    {
+        return getAllObjects( ArtifactResults.class, null );
+    }
+
+    public Iterator getIterator()
+    {
+        List allartifacts = getAllArtifactResults();
+        if ( allartifacts == null )
+        {
+            return Collections.EMPTY_LIST.iterator();
+        }
+
+        return allartifacts.iterator();
+    }
+
+    public List findArtifactResults( String groupId, String artifactId, String version )
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            Query query = pm.newQuery( "javax.jdo.query.JDOQL", "SELECT FROM " + ArtifactResults.class.getName()
+                + " WHERE groupId == findGroupId && " + " artifactId == findArtifactId && "
+                + " version == findVersionId" );
+            query.declareParameters( "String findGroupId, String findArtifactId, String findVersionId" );
+            query.setOrdering( "artifactId ascending" );
+
+            List result = (List) query.execute( groupId, artifactId, version );
+
+            result = (List) pm.detachCopyAll( result );
+
+            tx.commit();
+
+            return result;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    public void remove( ArtifactResults results )
+    {
+        removeObject( results );
+    }
+
+    public void remove( Artifact artifact )
+    {
+        try
+        {
+            ArtifactResults results = lookupArtifactResults( artifact );
+            remove( results );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            // nothing to do.
+        }
+    }
+
+    /**
+     * Get an {@link ArtifactResults} from the store.
+     * If the store does not have one, create it.
+     * 
+     * Equivalent to calling {@link #lookupArtifactResults(Artifact)} then if
+     * not found, using {@link #createArtifactResults(Artifact)}.
+     * 
+     * @param artifact the artifact information
+     * @return the ArtifactResults object (may not be in database yet, so don't forget to {@link #saveObject(Object)})
+     * @see #lookupArtifactResults(Artifact)
+     * @see #createArtifactResults(Artifact)
+     */
+    public ArtifactResults getArtifactResults( Artifact artifact )
+    {
+        ArtifactResults results;
+
+        try
+        {
+            results = lookupArtifactResults( artifact );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            results = createArtifactResults( artifact );
+        }
+
+        return results;
+    }
+
+    /**
+     * Create a new {@link ArtifactResults} object from the provided Artifact information.
+     * 
+     * @param artifact the artifact information.
+     * @return the new {@link ArtifactResults} object.
+     * @see #getArtifactResults(Artifact)
+     * @see #lookupArtifactResults(Artifact)
+     */
+    private ArtifactResults createArtifactResults( Artifact artifact )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version,
+         * type, classifier.
+         * This also means that none of those fields may be null.  However, an empty string may still
+         * be used in place of a null.
+         */
+
+        ArtifactResults results = new ArtifactResults();
+        results.setGroupId( StringUtils.defaultString( artifact.getGroupId() ) );
+        results.setArtifactId( StringUtils.defaultString( artifact.getArtifactId() ) );
+        results.setVersion( StringUtils.defaultString( artifact.getVersion() ) );
+        results.setType( StringUtils.defaultString( artifact.getType() ) );
+        results.setClassifier( StringUtils.defaultString( artifact.getClassifier() ) );
+
+        return results;
+    }
+
+    /**
+     * Lookup the {@link ArtifactResults} in the JDO store from the information in
+     * the provided Artifact.
+     * 
+     * @param artifact the artifact information.
+     * @return the previously saved {@link ArtifactResults} from the JDO store.
+     * @throws JDOObjectNotFoundException if the {@link ArtifactResults} are not found.
+     * @see #getArtifactResults(Artifact)
+     * @see #createArtifactResults(Artifact)
+     */
+    private ArtifactResults lookupArtifactResults( Artifact artifact )
+        throws JDOObjectNotFoundException
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version,
+         * type, classifier.
+         * This also means that none of those fields may be null.  However, an empty string may still
+         * be used in place of a null.
+         */
+
+        ArtifactResultsKey key = new ArtifactResultsKey();
+        key.groupId = StringUtils.defaultString( artifact.getGroupId() );
+        key.artifactId = StringUtils.defaultString( artifact.getArtifactId() );
+        key.version = StringUtils.defaultString( artifact.getVersion() );
+        key.type = StringUtils.defaultString( artifact.getType() );
+        key.classifier = StringUtils.defaultString( artifact.getClassifier() );
+
+        return (ArtifactResults) getObjectByKey( ArtifactResults.class, key );
+    }
+
+    public int getNumFailures()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getFailures().size();
+        }
+        return count;
+    }
+
+    public int getNumNotices()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getNotices().size();
+        }
+        return count;
+    }
+
+    public int getNumWarnings()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getWarnings().size();
+        }
+        return count;
+    }
+}
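A sketch of how a report processor might drive this component. The client class, the processor/problem strings, and the use of ArtifactFactory.createBuildArtifact() are assumptions for illustration; the database calls themselves are the ones defined above, and the field wiring follows the @plexus.requirement convention used throughout this change.

package org.apache.maven.archiva.reporting.database;

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;

import java.util.List;

// Hypothetical caller; both fields would be wired by Plexus as elsewhere in this module.
public class ExampleArtifactReportClient
{
    /**
     * @plexus.requirement
     */
    private ArtifactResultsDatabase artifactDatabase;

    /**
     * @plexus.requirement
     */
    private ArtifactFactory artifactFactory;

    public int reportMissingPom( String groupId, String artifactId, String version )
    {
        Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, "jar" );

        // Gets or creates the ArtifactResults record, appends the failure, and saves it.
        artifactDatabase.addFailure( artifact, "example-processor", "missing-pom",
                                     "No POM found for " + artifact.getId() );

        // The stored results can later be read back by coordinate.
        List stored = artifactDatabase.findArtifactResults( groupId, artifactId, version );
        return stored.size();
    }
}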
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java
new file mode 100644 (file)
index 0000000..82e62f1
--- /dev/null
@@ -0,0 +1,209 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.MetadataResultsKey;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.jdo.JDOObjectNotFoundException;
+
+/**
+ * MetadataResultsDatabase 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * 
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.MetadataResultsDatabase"
+ */
+public class MetadataResultsDatabase
+    extends AbstractResultsDatabase
+{
+    public static final String ROLE = MetadataResultsDatabase.class.getName();
+
+    public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getFailures().contains( result ) )
+        {
+            results.addFailure( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getWarnings().contains( result ) )
+        {
+            results.addWarning( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getNotices().contains( result ) )
+        {
+            results.addNotice( result );
+        }
+
+        saveObject( results );
+    }
+
+    public void clearResults( MetadataResults results )
+    {
+        results.getFailures().clear();
+        results.getWarnings().clear();
+        results.getNotices().clear();
+
+        saveObject( results );
+    }
+
+    public List getAllMetadataResults()
+    {
+        return getAllObjects( MetadataResults.class, null );
+    }
+
+    public Iterator getIterator()
+    {
+        List allmetadatas = getAllMetadataResults();
+        if ( allmetadatas == null )
+        {
+            return Collections.EMPTY_LIST.iterator();
+        }
+
+        return allmetadatas.iterator();
+    }
+
+    public void remove( MetadataResults results )
+    {
+        removeObject( results );
+    }
+
+    public void remove( RepositoryMetadata metadata )
+    {
+        try
+        {
+            MetadataResults results = lookupMetadataResults( metadata );
+            remove( results );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            // nothing to do.
+        }
+    }
+
+    public MetadataResults getMetadataResults( RepositoryMetadata metadata )
+    {
+        MetadataResults results;
+
+        try
+        {
+            results = lookupMetadataResults( metadata );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            results = createMetadataResults( metadata );
+        }
+
+        return results;
+    }
+
+    private MetadataResults createMetadataResults( RepositoryMetadata metadata )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version.
+         * This also means that none of those fields may be null.  However, an empty string may still
+         * be used in place of a null.
+         */
+
+        MetadataResults results = new MetadataResults();
+        results.setGroupId( StringUtils.defaultString( metadata.getGroupId() ) );
+        results.setArtifactId( StringUtils.defaultString( metadata.getArtifactId() ) );
+        results.setVersion( StringUtils.defaultString( metadata.getBaseVersion() ) );
+
+        return results;
+    }
+
+    private MetadataResults lookupMetadataResults( RepositoryMetadata metadata )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version.
+         * This also means that none of those fields may be null.  However, an empty string may still
+         * be used in place of a null.
+         */
+
+        MetadataResultsKey key = new MetadataResultsKey();
+        key.groupId = StringUtils.defaultString( metadata.getGroupId(), "" );
+        key.artifactId = StringUtils.defaultString( metadata.getArtifactId(), "" );
+        key.version = StringUtils.defaultString( metadata.getBaseVersion(), "" );
+
+        return (MetadataResults) getObjectByKey( MetadataResults.class, key );
+    }
+
+    public int getNumFailures()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getFailures().size();
+        }
+        return count;
+    }
+
+    public int getNumNotices()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getNotices().size();
+        }
+        return count;
+    }
+
+    public int getNumWarnings()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getWarnings().size();
+        }
+        return count;
+    }
+}
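The metadata variant works the same way. Below is a sketch of recording a warning against group-level metadata; the client class and the problem strings are hypothetical, and GroupRepositoryMetadata is assumed to be the maven-artifact RepositoryMetadata implementation whose artifactId and baseVersion are null, which is exactly the case the empty-string key handling above is written for.

package org.apache.maven.archiva.reporting.database;

import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;

// Hypothetical caller; metadataDatabase would be wired by Plexus as elsewhere in this module.
public class ExampleMetadataReportClient
{
    /**
     * @plexus.requirement
     */
    private MetadataResultsDatabase metadataDatabase;

    public int reportStaleMetadata( String groupId )
    {
        RepositoryMetadata metadata = new GroupRepositoryMetadata( groupId );

        // artifactId and baseVersion are null for group metadata; the database
        // stores them as empty strings to satisfy its compound primary key.
        metadataDatabase.addWarning( metadata, "example-processor", "stale-metadata",
                                     "maven-metadata.xml is older than its artifacts" );

        return metadataDatabase.getNumWarnings();
    }
}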
index 201ce4ba477f2db4c2ad0ba1616172924a9d87e5..4dd2430a55996b4239ed40805e4a61ca74e976ef 100644 (file)
@@ -19,618 +19,106 @@ package org.apache.maven.archiva.reporting.database;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.Date;
-import java.util.HashMap;
 import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Set;
 
 /**
+ * The Main Reporting Database.
+ * 
  * @todo i18n, including message formatting and parameterisation
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.ReportingDatabase"
  */
 public class ReportingDatabase
 {
-    private final Reporting reporting;
-
-    private Map artifactMap;
-
-    private Map metadataMap;
-
-    private int numFailures;
-
-    private int numWarnings;
-
-    private ArtifactRepository repository;
-
-    private boolean inProgress;
-
-    private long startTime;
-
-    private final ReportGroup reportGroup;
-
-    private Set metadataWithProblems;
-
-    private Map filteredDatabases = new HashMap();
-
-    private int numNotices;
-
-    public ReportingDatabase( ReportGroup reportGroup )
-    {
-        this( reportGroup, new Reporting() );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, Reporting reporting )
-    {
-        this( reportGroup, reporting, null );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, ArtifactRepository repository )
-    {
-        this( reportGroup, new Reporting(), repository );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, Reporting reporting, ArtifactRepository repository )
-    {
-        this.reportGroup = reportGroup;
-
-        this.reporting = reporting;
-
-        this.repository = repository;
-
-        initArtifactMap();
-
-        initMetadataMap();
-    }
-
-    public void addFailure( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getFailures().contains( result ) )
-        {
-            results.addFailure( result );
-            numFailures++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addFailure( artifact, processor, problem, reason );
-        }
-    }
-
-    public void addNotice( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getNotices().contains( result ) )
-        {
-            results.addNotice( result );
-            numNotices++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addNotice( artifact, processor, problem, reason );
-        }
-    }
-
-    public void addWarning( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getWarnings().contains( result ) )
-        {
-            results.addWarning( result );
-            numWarnings++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addWarning( artifact, processor, problem, reason );
-        }
-    }
-
-    ArtifactResults getArtifactResults( Artifact artifact )
-    {
-        return getArtifactResults( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
-                                   artifact.getType(), artifact.getClassifier() );
-    }
-
-    private ArtifactResults getArtifactResults( String groupId, String artifactId, String version, String type,
-                                                String classifier )
-    {
-        Map artifactMap = this.artifactMap;
-
-        String key = getArtifactKey( groupId, artifactId, version, type, classifier );
-        ArtifactResults results = (ArtifactResults) artifactMap.get( key );
-        if ( results == null )
-        {
-            results = new ArtifactResults();
-            results.setArtifactId( artifactId );
-            results.setClassifier( classifier );
-            results.setGroupId( groupId );
-            results.setType( type );
-            results.setVersion( version );
-
-            artifactMap.put( key, results );
-            reporting.getArtifacts().add( results );
-        }
-
-        return results;
-    }
-
-    private void initArtifactMap()
-    {
-        Map map = new HashMap();
-        for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
-        {
-            ArtifactResults result = (ArtifactResults) i.next();
-
-            String key = getArtifactKey( result.getGroupId(), result.getArtifactId(), result.getVersion(),
-                                         result.getType(), result.getClassifier() );
-            map.put( key, result );
-
-            numFailures += result.getFailures().size();
-            numWarnings += result.getWarnings().size();
-            numNotices += result.getNotices().size();
-        }
-        artifactMap = map;
-    }
-
-    private static String getArtifactKey( String groupId, String artifactId, String version, String type,
-                                          String classifier )
-    {
-        return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
-    }
-
-    private static Result createResult( String processor, String problem, String reason )
-    {
-        Result result = new Result();
-        result.setProcessor( processor );
-        result.setProblem( problem );
-        result.setReason( reason );
-        return result;
-    }
-
-    public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getFailures().contains( result ) )
-        {
-            results.addFailure( result );
-            numFailures++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addFailure( metadata, processor, problem, reason );
-        }
-    }
-
-    public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getWarnings().contains( result ) )
-        {
-            results.addWarning( result );
-            numWarnings++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addWarning( metadata, processor, problem, reason );
-        }
-    }
-
-    public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        Result result = createResult( processor, problem, reason );
-        if ( !results.getNotices().contains( result ) )
-        {
-            results.addNotice( result );
-            numNotices++;
-        }
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addNotice( metadata, processor, problem, reason );
-        }
-    }
-
-    public Set getMetadataWithProblems()
-    {
-        return metadataWithProblems;
-    }
-
-    private void initMetadataMap()
-    {
-        Map map = new HashMap();
-        Set problems = new LinkedHashSet();
-
-        for ( Iterator i = reporting.getMetadata().iterator(); i.hasNext(); )
-        {
-            MetadataResults result = (MetadataResults) i.next();
-
-            String key = getMetadataKey( result.getGroupId(), result.getArtifactId(), result.getVersion() );
-
-            map.put( key, result );
-
-            numFailures += result.getFailures().size();
-            numWarnings += result.getWarnings().size();
-            numNotices += result.getNotices().size();
-
-            if ( !result.getFailures().isEmpty() || !result.getWarnings().isEmpty() || !result.getNotices().isEmpty() )
-            {
-                problems.add( result );
-            }
-        }
-        metadataMap = map;
-        metadataWithProblems = problems;
-    }
+    public static final String ROLE = ReportingDatabase.class.getName();
 
-    private static String getMetadataKey( String groupId, String artifactId, String version )
-    {
-        return groupId + ":" + artifactId + ":" + version;
-    }
-
-    public int getNumFailures()
-    {
-        return numFailures;
-    }
-
-    public int getNumWarnings()
-    {
-        return numWarnings;
-    }
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase artifactDatabase;
 
-    public Reporting getReporting()
-    {
-        return reporting;
-    }
+    /**
+     * @plexus.requirement
+     */
+    private MetadataResultsDatabase metadataDatabase;
 
     public Iterator getArtifactIterator()
     {
-        return reporting.getArtifacts().iterator();
+        return artifactDatabase.getIterator();
     }
 
     public Iterator getMetadataIterator()
     {
-        return reporting.getMetadata().iterator();
+        return metadataDatabase.getIterator();
     }
 
-    public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp )
+    public void clear()
     {
-        String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
-        Map map = metadataMap;
-        MetadataResults results = (MetadataResults) map.get( key );
-        return results != null && results.getLastModified() >= timestamp;
     }
 
     /**
-     * Make sure the metadata record exists, but remove any previous reports in preparation for adding new ones.
-     *
-     * @param metadata     the metadata
-     * @param lastModified the modification time of the file being tracked
+     * <p>
+     * Get the number of failures in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of failures in the database.
      */
-    public void cleanMetadata( RepositoryMetadata metadata, long lastModified )
-    {
-        MetadataResults results = getMetadataResults( metadata, lastModified );
-
-        results.setLastModified( lastModified );
-
-        numFailures -= results.getFailures().size();
-        results.getFailures().clear();
-
-        numWarnings -= results.getWarnings().size();
-        results.getWarnings().clear();
-
-        numNotices -= results.getWarnings().size();
-        results.getNotices().clear();
-
-        metadataWithProblems.remove( results );
-    }
-
-    MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified )
-    {
-        return getMetadataResults( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion(),
-                                   lastModified );
-    }
-
-    private MetadataResults getMetadataResults( String groupId, String artifactId, String baseVersion,
-                                                long lastModified )
-    {
-        String key = getMetadataKey( groupId, artifactId, baseVersion );
-        Map metadataMap = this.metadataMap;
-        MetadataResults results = (MetadataResults) metadataMap.get( key );
-        if ( results == null )
-        {
-            results = new MetadataResults();
-            results.setArtifactId( artifactId );
-            results.setGroupId( groupId );
-            results.setVersion( baseVersion );
-            results.setLastModified( lastModified );
-
-            metadataMap.put( key, results );
-            reporting.getMetadata().add( results );
-        }
-        return results;
-    }
-
-    public void removeArtifact( Artifact artifact )
-    {
-        Map map = artifactMap;
-
-        String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
-                                     artifact.getType(), artifact.getClassifier() );
-        ArtifactResults results = (ArtifactResults) map.get( key );
-        if ( results != null )
-        {
-            for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
-            {
-                if ( results.equals( i.next() ) )
-                {
-                    i.remove();
-                }
-            }
-
-            numFailures -= results.getFailures().size();
-            numWarnings -= results.getWarnings().size();
-            numNotices -= results.getNotices().size();
-
-            map.remove( key );
-        }
-    }
-
-    public ArtifactRepository getRepository()
-    {
-        return repository;
-    }
-
-    public boolean isInProgress()
-    {
-        return inProgress;
-    }
-
-    public void setInProgress( boolean inProgress )
-    {
-        this.inProgress = inProgress;
-
-        if ( inProgress )
-        {
-            startTime = System.currentTimeMillis();
-        }
-    }
-
-    public void clear()
-    {
-        // clear the values rather than destroy the instance so that the "inProgress" indicator is in tact.
-        numWarnings = 0;
-        numNotices = 0;
-        numFailures = 0;
-
-        artifactMap.clear();
-        metadataMap.clear();
-        metadataWithProblems.clear();
-        filteredDatabases.clear();
-
-        reporting.getArtifacts().clear();
-        reporting.getMetadata().clear();
-
-        updateTimings();
-    }
-
-    public void setStartTime( long startTime )
-    {
-        this.startTime = startTime;
-    }
-
-    public long getStartTime()
-    {
-        return startTime;
-    }
-
-    public void updateTimings()
-    {
-        long startTime = getStartTime();
-        Date endTime = new Date();
-        if ( startTime > 0 )
-        {
-            getReporting().setExecutionTime( endTime.getTime() - startTime );
-        }
-        getReporting().setLastModified( endTime.getTime() );
-    }
-
-    public ReportGroup getReportGroup()
+    public int getNumFailures()
     {
-        return reportGroup;
+        int count = 0;
+        count += artifactDatabase.getNumFailures();
+        count += metadataDatabase.getNumFailures();
+        return count;
     }
 
-    public ReportingDatabase getFilteredDatabase( String filter )
+    /**
+     * <p>
+     * Get the number of notices in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of notices in the database.
+     */
+    public int getNumNotices()
     {
-        ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( filter );
-
-        if ( reportingDatabase == null )
-        {
-            reportingDatabase = new ReportingDatabase( reportGroup, repository );
-
-            Reporting reporting = reportingDatabase.getReporting();
-            reporting.setExecutionTime( this.reporting.getExecutionTime() );
-            reporting.setLastModified( this.reporting.getLastModified() );
-
-            for ( Iterator i = this.reporting.getArtifacts().iterator(); i.hasNext(); )
-            {
-                ArtifactResults results = (ArtifactResults) i.next();
-                ArtifactResults targetResults = null;
-                for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addFailure( result );
-                        reportingDatabase.numFailures++;
-                    }
-                }
-                for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addWarning( result );
-                        reportingDatabase.numWarnings++;
-                    }
-                }
-                for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addNotice( result );
-                        reportingDatabase.numNotices++;
-                    }
-                }
-            }
-            for ( Iterator i = this.reporting.getMetadata().iterator(); i.hasNext(); )
-            {
-                MetadataResults results = (MetadataResults) i.next();
-                MetadataResults targetResults = null;
-                for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addFailure( result );
-                        reportingDatabase.numFailures++;
-                    }
-                }
-                for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addWarning( result );
-                        reportingDatabase.numWarnings++;
-                    }
-                }
-                for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addNotice( result );
-                        reportingDatabase.numNotices++;
-                    }
-                }
-            }
-
-            filteredDatabases.put( filter, reportingDatabase );
-        }
-
-        return reportingDatabase;
+        int count = 0;
+        count += artifactDatabase.getNumNotices();
+        count += metadataDatabase.getNumNotices();
+        return count;
     }
 
-    private static MetadataResults createMetadataResults( ReportingDatabase reportingDatabase, MetadataResults results )
+    /**
+     * <p>
+     * Get the number of warnings in the database.
+     * </p>
+     * 
+     * <p>
+     * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+     * </p>
+     * 
+     * @return the number of warnings in the database.
+     */
+    public int getNumWarnings()
     {
-        MetadataResults targetResults = reportingDatabase.getMetadataResults( results.getGroupId(),
-                                                                              results.getArtifactId(),
-                                                                              results.getVersion(),
-                                                                              results.getLastModified() );
-        reportingDatabase.metadataWithProblems.add( targetResults );
-        return targetResults;
+        int count = 0;
+        count += artifactDatabase.getNumWarnings();
+        count += metadataDatabase.getNumWarnings();
+        return count;
     }
 
-    private static ArtifactResults createArtifactResults( ReportingDatabase reportingDatabase, ArtifactResults results )
+    public ArtifactResultsDatabase getArtifactDatabase()
     {
-        return reportingDatabase.getArtifactResults( results.getGroupId(), results.getArtifactId(),
-                                                     results.getVersion(), results.getType(), results.getClassifier() );
+        return artifactDatabase;
     }
 
-    public int getNumNotices()
+    public MetadataResultsDatabase getMetadataDatabase()
     {
-        return numNotices;
+        return metadataDatabase;
     }
 }
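With the per-scan counters gone, ReportingDatabase is now a thin facade over the two JDO-backed stores, so callers read aggregated totals instead of maintaining their own. A sketch of such a caller follows; the class name is hypothetical, and the wiring again follows the @plexus.requirement convention used throughout this change.

package org.apache.maven.archiva.reporting.database;

// Hypothetical consumer of the reworked facade.
public class ExampleReportSummary
{
    /**
     * @plexus.requirement
     */
    private ReportingDatabase reportingDatabase;

    public String summarize()
    {
        // Each call fans out to the artifact and metadata databases, which in turn
        // iterate every stored record, hence the "use sparingly" warnings above.
        return reportingDatabase.getNumFailures() + " failures, "
            + reportingDatabase.getNumWarnings() + " warnings, "
            + reportingDatabase.getNumNotices() + " notices";
    }
}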
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java
deleted file mode 100644 (file)
index 8bc4b9d..0000000
+++ /dev/null
@@ -1,247 +0,0 @@
-package org.apache.maven.archiva.reporting.executor;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStore;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.InvalidArtifactRTException;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-import org.apache.maven.model.Model;
-import org.apache.maven.project.MavenProject;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.apache.maven.project.ProjectBuildingException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Report executor implementation.
- *
- * @todo should the report set be limitable by configuration?
- * @plexus.component
- */
-public class DefaultReportExecutor
-    extends AbstractLogEnabled
-    implements ReportExecutor
-{
-    /**
-     * @plexus.requirement
-     */
-    private MavenProjectBuilder projectBuilder;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportingStore reportingStore;
-
-    /**
-     * @plexus.requirement
-     */
-    private ArtifactFactory artifactFactory;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
-     */
-    private Map artifactDiscoverers;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
-     */
-    private Map metadataDiscoverers;
-
-    private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
-    public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
-        for ( Iterator i = metadata.iterator(); i.hasNext(); )
-        {
-            RepositoryMetadata repositoryMetadata = (RepositoryMetadata) i.next();
-
-            File file =
-                new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
-            reporter.cleanMetadata( repositoryMetadata, file.lastModified() );
-
-            reportGroup.processMetadata( repositoryMetadata, repository, reporter );
-        }
-
-        reportingStore.storeReports( reporter, repository );
-    }
-
-    public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-
-            Model model = null;
-            try
-            {
-                Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
-                                                                              artifact.getArtifactId(),
-                                                                              artifact.getVersion() );
-                MavenProject project =
-                    projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
-
-                model = project.getModel();
-            }
-            catch ( InvalidArtifactRTException e )
-            {
-                reporter.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
-            }
-            catch ( ProjectBuildingException e )
-            {
-                reporter.addWarning( artifact, null, null, "Error reading project model: " + e );
-            }
-
-            reporter.removeArtifact( artifact );
-
-            reportGroup.processArtifact( artifact, model, reporter );
-        }
-
-        reportingStore.storeReports( reporter, repository );
-    }
-
-    public ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException
-    {
-        getLogger().debug(
-            "Reading previous report database " + reportGroup.getName() + " from repository " + repository.getId() );
-        return reportingStore.getReportsFromStore( repository, reportGroup );
-    }
-
-    public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
-                            ArtifactFilter filter )
-        throws DiscovererException, ReportingStoreException
-    {
-        // Flush (as in toilet, not store) the report database
-        ReportingDatabase database = getReportDatabase( repository, reportGroup );
-        database.clear();
-
-        // Discovery process
-        String layoutProperty = getRepositoryLayout( repository.getLayout() );
-        ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
-
-        // Save some memory by not tracking paths we won't use
-        // TODO: Plexus CDC should be able to inject this configuration
-        discoverer.setTrackOmittedPaths( false );
-
-        List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
-        if ( !artifacts.isEmpty() )
-        {
-            getLogger().info( "Discovered " + artifacts.size() + " artifacts" );
-
-            // Work through these in batches, then flush the project cache.
-            for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
-            {
-                int end = j + ARTIFACT_BUFFER_SIZE;
-                List currentArtifacts = artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
-                // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
-                // run the reports.
-                runArtifactReports( reportGroup, currentArtifacts, repository );
-
-                // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
-                // around that. TODO: remove when it is configurable
-                flushProjectBuilderCacheHack();
-            }
-        }
-
-        MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
-        List metadata =
-            metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
-
-        if ( !metadata.isEmpty() )
-        {
-            getLogger().info( "Discovered " + metadata.size() + " metadata files" );
-
-            // run the reports
-            runMetadataReports( reportGroup, metadata, repository );
-        }
-    }
-
-    private String getRepositoryLayout( ArtifactRepositoryLayout layout )
-    {
-        // gross limitation that there is no reverse lookup of the hint for the layout.
-        if ( layout.getClass().equals( DefaultRepositoryLayout.class ) )
-        {
-            return "default";
-        }
-        else if ( layout.getClass().equals( LegacyRepositoryLayout.class ) )
-        {
-            return "legacy";
-        }
-        else
-        {
-            throw new IllegalArgumentException( "Unknown layout: " + layout );
-        }
-    }
-
-    private void flushProjectBuilderCacheHack()
-    {
-        try
-        {
-            if ( projectBuilder != null )
-            {
-                java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
-                f.setAccessible( true );
-                Map cache = (Map) f.get( projectBuilder );
-                cache.clear();
-
-                f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
-                f.setAccessible( true );
-                cache = (Map) f.get( projectBuilder );
-                cache.clear();
-            }
-        }
-        catch ( NoSuchFieldException e )
-        {
-            throw new RuntimeException( e );
-        }
-        catch ( IllegalAccessException e )
-        {
-            throw new RuntimeException( e );
-        }
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java
deleted file mode 100644 (file)
index d6f7b5d..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-package org.apache.maven.archiva.reporting.executor;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.List;
-
-/**
- * Executes a report or report group.
- */
-public interface ReportExecutor
-{
-    /**
-     * Plexus component role name.
-     */
-    String ROLE = ReportExecutor.class.getName();
-
-    /**
-     * Run reports on a set of metadata.
-     *
-     * @param reportGroup the report set to run
-     * @param metadata    the RepositoryMetadata objects to report on
-     * @param repository  the repository that they come from
-     * @throws org.apache.maven.archiva.reporting.store.ReportingStoreException
-     *          if there is a problem reading/writing the report database
-     */
-    public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
-        throws ReportingStoreException;
-
-    /**
-     * Run reports on a set of artifacts.
-     *
-     * @param reportGroup the report set to run
-     * @param artifacts   the Artifact objects to report on
-     * @param repository  the repository that they come from
-     * @throws ReportingStoreException if there is a problem reading/writing the report database
-     */
-    public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
-        throws ReportingStoreException;
-
-    /**
-     * Get the report database in use for a given repository.
-     *
-     * @param repository  the repository
-     * @param reportGroup the report set to run
-     * @return the report database
-     * @throws ReportingStoreException if there is a problem reading the report database
-     */
-    ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException;
-
-    /**
-     * Run the artifact and metadata reports for the repository. The artifacts and metadata will be discovered.
-     *
-     * @param repository          the repository to run from
-     * @param blacklistedPatterns the patterns to exclude during discovery
-     * @param filter              the filter to use during discovery to get a consistent list of artifacts
-     * @param reportGroup         the report set to run
-     * @throws ReportingStoreException if there is a problem reading/writing the report database
-     * @throws org.apache.maven.archiva.discoverer.DiscovererException
-     *                                 if there is a problem finding the artifacts and metadata to report on
-     */
-    public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
-                            ArtifactFilter filter )
-        throws DiscovererException, ReportingStoreException;
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java
deleted file mode 100644 (file)
index f1158ad..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-package org.apache.maven.archiva.reporting.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Implementation of a reporting filter. Artifacts already in the database are ignored.
- */
-public class ReportingMetadataFilter
-    implements MetadataFilter
-{
-    private final ReportingDatabase reporter;
-
-    public ReportingMetadataFilter( ReportingDatabase reporter )
-    {
-        this.reporter = reporter;
-    }
-
-    public boolean include( RepositoryMetadata metadata, long timestamp )
-    {
-        return !reporter.isMetadataUpToDate( metadata, timestamp );
-    }
-}
index 2051f7d4b1a1c68d513cdf93d9e0b6d9d626190f..4e5acdddbba178a3f1b8dfebaf62c0de119184a3 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.group;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
 import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
 import org.apache.maven.artifact.Artifact;
@@ -46,7 +45,7 @@ public abstract class AbstractReportGroup
      */
     private Map metadataReports;
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase )
+    public void processArtifact( Artifact artifact, Model model )
     {
         for ( Iterator i = artifactReports.entrySet().iterator(); i.hasNext(); )
         {
@@ -56,13 +55,12 @@ public abstract class AbstractReportGroup
             {
                 ArtifactReportProcessor report = (ArtifactReportProcessor) entry.getValue();
 
-                report.processArtifact( artifact, model, reportingDatabase );
+                report.processArtifact( artifact, model );
             }
         }
     }
 
-    public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
-                                 ReportingDatabase reportingDatabase )
+    public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository )
     {
         for ( Iterator i = metadataReports.entrySet().iterator(); i.hasNext(); )
         {
@@ -72,7 +70,7 @@ public abstract class AbstractReportGroup
             {
                 MetadataReportProcessor report = (MetadataReportProcessor) entry.getValue();
 
-                report.processMetadata( repositoryMetadata, repository, reportingDatabase );
+                report.processMetadata( repositoryMetadata, repository );
             }
         }
     }
index df26343ac34dc7d94f0bfbc38bf471abee4a428d..58b8e2f635ada9d0a35b8e834291d78cc33e3df6 100644 (file)
@@ -25,7 +25,8 @@ import java.util.Map;
 /**
  * The default report set, for repository health.
  *
- * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" role-hint="health"
+ * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" 
+ *      role-hint="health"
  * @todo could these report groups be assembled dynamically by configuration rather than as explicit components? eg, reportGroup.addReport( ARP ), reportGroup.addReport( MRP )
  */
 public class DefaultReportGroup
@@ -62,9 +63,4 @@ public class DefaultReportGroup
     {
         return "Repository Health";
     }
-
-    public String getFilename()
-    {
-        return "health-report.xml";
-    }
 }
index 30fe412e8eac45f18512a6febf9c9336eb00edd6..fa34b223335dac177291bdd80cb3c4338c5f62c8 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.group;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
@@ -43,19 +42,16 @@ public interface ReportGroup
      *
      * @param artifact          the artifact to process
      * @param model             the POM associated with the artifact to process
-     * @param reportingDatabase the report database to store results in
      */
-    void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase );
+    void processArtifact( Artifact artifact, Model model );
 
     /**
      * Run any metadata related reports in the report set.
      *
      * @param repositoryMetadata the metadata to process
      * @param repository         the repository the metadata is located in
-     * @param reportingDatabase  the report database to store results in
      */
-    void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
-                          ReportingDatabase reportingDatabase );
+    void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository );
 
     /**
      * Whether a report with the given role hint is included in this report set.
@@ -79,11 +75,4 @@ public interface ReportGroup
      * @return the report name
      */
     String getName();
-
-    /**
-     * Get the filename of the reports within the repository's reports directory.
-     *
-     * @return the filename
-     */
-    String getFilename();
 }
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java
new file mode 100644 (file)
index 0000000..3494479
--- /dev/null
@@ -0,0 +1,158 @@
+package org.apache.maven.archiva.reporting.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.Serializable;
+
+/**
+ * ArtifactResultsKey - used by jpox for application identity for the {@link ArtifactResults} object and table. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArtifactResultsKey
+    implements Serializable
+{
+    public String groupId = "";
+
+    public String artifactId = "";
+
+    public String version = "";
+
+    public String type = "";
+
+    public String classifier = "";
+
+    public ArtifactResultsKey()
+    {
+        /* do nothing */
+    }
+
+    public ArtifactResultsKey( String key )
+    {
+        String parts[] = StringUtils.splitPreserveAllTokens( key, ':' );
+        groupId = parts[0];
+        artifactId = parts[1];
+        version = parts[2];
+        type = parts[3];
+        classifier = parts[4];
+    }
+
+    public String toString()
+    {
+        return StringUtils.join( new String[] { groupId, artifactId, version, type, classifier }, ':' );
+    }
+
+    public int hashCode()
+    {
+        final int PRIME = 31;
+        int result = 1;
+        result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() );
+        result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() );
+        result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
+        result = PRIME * result + ( ( type == null ) ? 0 : type.hashCode() );
+        result = PRIME * result + ( ( classifier == null ) ? 0 : classifier.hashCode() );
+        return result;
+    }
+
+    public boolean equals( Object obj )
+    {
+        if ( this == obj )
+        {
+            return true;
+        }
+
+        if ( obj == null )
+        {
+            return false;
+        }
+
+        if ( getClass() != obj.getClass() )
+        {
+            return false;
+        }
+
+        final ArtifactResultsKey other = (ArtifactResultsKey) obj;
+
+        if ( groupId == null )
+        {
+            if ( other.groupId != null )
+            {
+                return false;
+            }
+        }
+        else if ( !groupId.equals( other.groupId ) )
+        {
+            return false;
+        }
+
+        if ( artifactId == null )
+        {
+            if ( other.artifactId != null )
+            {
+                return false;
+            }
+        }
+        else if ( !artifactId.equals( other.artifactId ) )
+        {
+            return false;
+        }
+
+        if ( version == null )
+        {
+            if ( other.version != null )
+            {
+                return false;
+            }
+        }
+        else if ( !version.equals( other.version ) )
+        {
+            return false;
+        }
+
+        if ( type == null )
+        {
+            if ( other.type != null )
+            {
+                return false;
+            }
+        }
+        else if ( !type.equals( other.type ) )
+        {
+            return false;
+        }
+
+        if ( classifier == null )
+        {
+            if ( other.classifier != null )
+            {
+                return false;
+            }
+        }
+        else if ( !classifier.equals( other.classifier ) )
+        {
+            return false;
+        }
+
+        return true;
+    }
+}
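For reference, a minimal sketch (not part of this change) of how the colon-delimited key above round-trips between its string form and its fields; the coordinates are invented for illustration, and MetadataResultsKey below follows the same pattern with only groupId, artifactId and version.

import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.reporting.model.ArtifactResultsKey;

public class ArtifactResultsKeyExample
{
    public static void main( String[] args )
    {
        // A key string has five fields: groupId:artifactId:version:type:classifier.
        // splitPreserveAllTokens keeps the trailing empty classifier field.
        String raw = "org.example:example-artifact:1.0:jar:";

        ArtifactResultsKey key = new ArtifactResultsKey( raw );
        ArtifactResultsKey again = new ArtifactResultsKey( key.toString() );

        System.out.println( key );                 // org.example:example-artifact:1.0:jar:
        System.out.println( key.equals( again ) ); // true
        System.out.println( StringUtils.splitPreserveAllTokens( raw, ':' ).length ); // 5
    }
}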
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java
new file mode 100644 (file)
index 0000000..aeaff3b
--- /dev/null
@@ -0,0 +1,126 @@
+package org.apache.maven.archiva.reporting.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.Serializable;
+
+/**
+ * MetadataResultsKey - used by jpox for application identity for the {@link MetadataResults} object and table. 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MetadataResultsKey
+    implements Serializable
+{
+    public String groupId = "";
+
+    public String artifactId = "";
+
+    public String version = "";
+
+    public MetadataResultsKey()
+    {
+        /* do nothing */
+    }
+
+    public MetadataResultsKey( String key )
+    {
+        String parts[] = StringUtils.splitPreserveAllTokens( key, ':' );
+        groupId = parts[0];
+        artifactId = parts[1];
+        version = parts[2];
+    }
+
+    public String toString()
+    {
+        return StringUtils.join( new String[] { groupId, artifactId, version }, ':' );
+    }
+
+    public int hashCode()
+    {
+        final int PRIME = 31;
+        int result = 1;
+        result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() );
+        result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() );
+        result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
+        return result;
+    }
+
+    public boolean equals( Object obj )
+    {
+        if ( this == obj )
+        {
+            return true;
+        }
+
+        if ( obj == null )
+        {
+            return false;
+        }
+
+        if ( getClass() != obj.getClass() )
+        {
+            return false;
+        }
+
+        final MetadataResultsKey other = (MetadataResultsKey) obj;
+
+        if ( groupId == null )
+        {
+            if ( other.groupId != null )
+            {
+                return false;
+            }
+        }
+        else if ( !groupId.equals( other.groupId ) )
+        {
+            return false;
+        }
+
+        if ( artifactId == null )
+        {
+            if ( other.artifactId != null )
+            {
+                return false;
+            }
+        }
+        else if ( !artifactId.equals( other.artifactId ) )
+        {
+            return false;
+        }
+
+        if ( version == null )
+        {
+            if ( other.version != null )
+            {
+                return false;
+            }
+        }
+        else if ( !version.equals( other.version ) )
+        {
+            return false;
+        }
+
+        return true;
+    }    
+}
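A hedged sketch, not part of the patch: a JUnit 3 style check of the application-identity contract that MetadataResultsKey is expected to satisfy. The coordinates and the test class name are invented.

import junit.framework.TestCase;
import org.apache.maven.archiva.reporting.model.MetadataResultsKey;

public class MetadataResultsKeyContractTest
    extends TestCase
{
    public void testRoundTripAndEquality()
    {
        MetadataResultsKey one = new MetadataResultsKey( "org.example:example-artifact:1.0" );
        MetadataResultsKey two = new MetadataResultsKey( one.toString() );

        // Keys parsed from the same groupId:artifactId:version string should be
        // equal in both directions and share a hash code.
        assertEquals( "org.example:example-artifact:1.0", one.toString() );
        assertTrue( one.equals( two ) );
        assertTrue( two.equals( one ) );
        assertEquals( one.hashCode(), two.hashCode() );
    }
}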
index 79b7d53f0d2522c30b06faae419e51fedd4ca533..52e63c5e71d2d5630d43ea76008285e11551d18b 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.model.Model;
 
@@ -31,5 +30,5 @@ public interface ArtifactReportProcessor
 {
     String ROLE = ArtifactReportProcessor.class.getName();
 
-    void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter );
+    void processArtifact( Artifact artifact, Model model );
 }
index 7660dfcb0a60188b8738b8fc99594a2e502fb0e6..04a31af7e0b583ced16c940cab8724e427397696 100644 (file)
@@ -22,7 +22,7 @@ package org.apache.maven.archiva.reporting.processor;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.archiva.layer.RepositoryQueryLayer;
 import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -61,6 +61,11 @@ public class BadMetadataReportProcessor
      */
     private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
 
+    /**
+     * @plexus.requirement
+     */
+    private MetadataResultsDatabase database;
+
     private static final String ROLE_HINT = "bad-metadata";
 
     /**
@@ -70,18 +75,17 @@ public class BadMetadataReportProcessor
      * @param repository the repository where the metadata was encountered
      * @param reporter   the ReportingDatabase to receive processing results
      */
-    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                 ReportingDatabase reporter )
+    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
     {
         if ( metadata.storedInGroupDirectory() )
         {
             try
             {
-                checkPluginMetadata( metadata, repository, reporter );
+                checkPluginMetadata( metadata, repository );
             }
             catch ( IOException e )
             {
-                addWarning( reporter, metadata, null, "Error getting plugin artifact directories versions: " + e );
+                addWarning( metadata, null, "Error getting plugin artifact directories versions: " + e );
             }
         }
         else
@@ -98,36 +102,34 @@ public class BadMetadataReportProcessor
             }
             if ( !found )
             {
-                addFailure( reporter, metadata, "missing-last-updated",
-                            "Missing lastUpdated element inside the metadata." );
+                addFailure( metadata, "missing-last-updated", "Missing lastUpdated element inside the metadata." );
             }
 
             if ( metadata.storedInArtifactVersionDirectory() )
             {
-                checkSnapshotMetadata( metadata, repository, reporter );
+                checkSnapshotMetadata( metadata, repository );
             }
             else
             {
-                checkMetadataVersions( metadata, repository, reporter );
+                checkMetadataVersions( metadata, repository );
 
                 try
                 {
-                    checkRepositoryVersions( metadata, repository, reporter );
+                    checkRepositoryVersions( metadata, repository );
                 }
                 catch ( IOException e )
                 {
                     String reason = "Error getting plugin artifact directories versions: " + e;
-                    addWarning( reporter, metadata, null, reason );
+                    addWarning( metadata, null, reason );
                 }
             }
         }
     }
 
-    private static void addWarning( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
+    private void addWarning( RepositoryMetadata metadata, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addWarning( metadata, ROLE_HINT, problem, reason );
+        database.addWarning( metadata, ROLE_HINT, problem, reason );
     }
 
     /**
@@ -137,12 +139,11 @@ public class BadMetadataReportProcessor
      * @param repository the repository where the metadata was encountered
      * @param reporter   the ReportingDatabase to receive processing results
      */
-    private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                      ReportingDatabase reporter )
+    private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
         throws IOException
     {
-        File metadataDir =
-            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+        File metadataDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) )
+            .getParentFile();
         List pluginDirs = getArtifactIdFiles( metadataDir );
 
         Map prefixes = new HashMap();
@@ -153,22 +154,22 @@ public class BadMetadataReportProcessor
             String artifactId = plugin.getArtifactId();
             if ( artifactId == null || artifactId.length() == 0 )
             {
-                addFailure( reporter, metadata, "missing-artifact-id:" + plugin.getPrefix(),
+                addFailure( metadata, "missing-artifact-id:" + plugin.getPrefix(),
                             "Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
             }
 
             String prefix = plugin.getPrefix();
             if ( prefix == null || prefix.length() == 0 )
             {
-                addFailure( reporter, metadata, "missing-plugin-prefix:" + artifactId,
+                addFailure( metadata, "missing-plugin-prefix:" + artifactId,
                             "Missing or empty plugin prefix for artifactId " + artifactId + "." );
             }
             else
             {
                 if ( prefixes.containsKey( prefix ) )
                 {
-                    addFailure( reporter, metadata, "duplicate-plugin-prefix:" + prefix,
-                                "Duplicate plugin prefix found: " + prefix + "." );
+                    addFailure( metadata, "duplicate-plugin-prefix:" + prefix, "Duplicate plugin prefix found: "
+                        + prefix + "." );
                 }
                 else
                 {
@@ -181,8 +182,8 @@ public class BadMetadataReportProcessor
                 File pluginDir = new File( metadataDir, artifactId );
                 if ( !pluginDirs.contains( pluginDir ) )
                 {
-                    addFailure( reporter, metadata, "missing-plugin-from-repository:" + artifactId,
-                                "Metadata plugin " + artifactId + " not found in the repository" );
+                    addFailure( metadata, "missing-plugin-from-repository:" + artifactId, "Metadata plugin "
+                        + artifactId + " not found in the repository" );
                 }
                 else
                 {
@@ -196,8 +197,8 @@ public class BadMetadataReportProcessor
             for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
             {
                 File plugin = (File) plugins.next();
-                addFailure( reporter, metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " +
-                    plugin.getName() + " is present in the repository but " + "missing in the metadata." );
+                addFailure( metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " + plugin.getName()
+                    + " is present in the repository but " + "missing in the metadata." );
             }
         }
     }
@@ -209,27 +210,26 @@ public class BadMetadataReportProcessor
      * @param repository the repository where the metadata was encountered
      * @param reporter   the ReportingDatabase to receive processing results
      */
-    private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                        ReportingDatabase reporter )
+    private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
     {
-        RepositoryQueryLayer repositoryQueryLayer =
-            repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+        RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
 
         Versioning versioning = metadata.getMetadata().getVersioning();
         if ( versioning != null )
         {
             Snapshot snapshot = versioning.getSnapshot();
 
-            String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION,
-                                                  snapshot.getTimestamp() + "-" + snapshot.getBuildNumber() );
-            Artifact artifact =
-                artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+            String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION, snapshot
+                .getTimestamp()
+                + "-" + snapshot.getBuildNumber() );
+            Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(),
+                                                                       version );
             artifact.isSnapshot(); // trigger baseVersion correction
 
             if ( !repositoryQueryLayer.containsArtifact( artifact ) )
             {
-                addFailure( reporter, metadata, "missing-snapshot-artifact-from-repository:" + version,
-                            "Snapshot artifact " + version + " does not exist." );
+                addFailure( metadata, "missing-snapshot-artifact-from-repository:" + version, "Snapshot artifact "
+                    + version + " does not exist." );
             }
         }
     }
@@ -241,11 +241,9 @@ public class BadMetadataReportProcessor
      * @param repository the repository where the metadata was encountered
      * @param reporter   the ReportingDatabase to receive processing results
      */
-    private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
-                                        ReportingDatabase reporter )
+    private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository )
     {
-        RepositoryQueryLayer repositoryQueryLayer =
-            repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+        RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
 
         Versioning versioning = metadata.getMetadata().getVersioning();
         if ( versioning != null )
@@ -254,13 +252,13 @@ public class BadMetadataReportProcessor
             {
                 String version = (String) versions.next();
 
-                Artifact artifact =
-                    artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+                Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata
+                    .getArtifactId(), version );
 
                 if ( !repositoryQueryLayer.containsArtifact( artifact ) )
                 {
-                    addFailure( reporter, metadata, "missing-artifact-from-repository:" + version, "Artifact version " +
-                        version + " is present in metadata but " + "missing in the repository." );
+                    addFailure( metadata, "missing-artifact-from-repository:" + version, "Artifact version " + version
+                        + " is present in metadata but " + "missing in the repository." );
                 }
             }
         }
@@ -275,14 +273,13 @@ public class BadMetadataReportProcessor
      * @param reporter   the ReportingDatabase to receive processing results
      * @throws java.io.IOException if there is a problem reading from the file system
      */
-    private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
-                                          ReportingDatabase reporter )
+    private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository )
         throws IOException
     {
         Versioning versioning = metadata.getMetadata().getVersioning();
         List metadataVersions = versioning != null ? versioning.getVersions() : Collections.EMPTY_LIST;
-        File versionsDir =
-            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+        File versionsDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) )
+            .getParentFile();
 
         // TODO: I don't know how this condition can happen, but it was seen on the main repository.
         // Avoid hard failure
@@ -295,14 +292,14 @@ public class BadMetadataReportProcessor
                 String version = path.getParentFile().getName();
                 if ( !metadataVersions.contains( version ) )
                 {
-                    addFailure( reporter, metadata, "missing-artifact-from-metadata:" + version, "Artifact version " +
-                        version + " found in the repository but " + "missing in the metadata." );
+                    addFailure( metadata, "missing-artifact-from-metadata:" + version, "Artifact version " + version
+                        + " found in the repository but " + "missing in the metadata." );
                 }
             }
         }
         else
         {
-            addFailure( reporter, metadata, null, "Metadata's directory did not exist: " + versionsDir );
+            addFailure( metadata, null, "Metadata's directory did not exist: " + versionsDir );
         }
     }
 
@@ -339,10 +336,9 @@ public class BadMetadataReportProcessor
         return artifactIdFiles;
     }
 
-    private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
+    private void addFailure( RepositoryMetadata metadata, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+        database.addFailure( metadata, ROLE_HINT, problem, reason );
     }
 }
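Sketch only: the duplicate-plugin-prefix check that checkPluginMetadata applies, shown in isolation against a hand-built group-level Metadata instance. The plugin entries and the class name are invented for illustration.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.apache.maven.artifact.repository.metadata.Metadata;
import org.apache.maven.artifact.repository.metadata.Plugin;

public class PluginPrefixCheckExample
{
    public static void main( String[] args )
    {
        Metadata metadata = new Metadata();
        metadata.addPlugin( plugin( "maven-clean-plugin", "clean" ) );
        metadata.addPlugin( plugin( "maven-cleanup-plugin", "clean" ) ); // duplicate prefix

        Map prefixes = new HashMap();
        for ( Iterator i = metadata.getPlugins().iterator(); i.hasNext(); )
        {
            Plugin plugin = (Plugin) i.next();
            String prefix = plugin.getPrefix();
            if ( prefixes.containsKey( prefix ) )
            {
                // This is the case reported as "duplicate-plugin-prefix:<prefix>".
                System.out.println( "duplicate-plugin-prefix:" + prefix );
            }
            else
            {
                prefixes.put( prefix, plugin );
            }
        }
    }

    private static Plugin plugin( String artifactId, String prefix )
    {
        Plugin plugin = new Plugin();
        plugin.setArtifactId( artifactId );
        plugin.setPrefix( prefix );
        return plugin;
    }
}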
index 36b60cc2ee96c91a89265161f36c109fb3a127ae..8bd5e141f331ca545f48b7c5c632682e8b4079eb 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.model.Model;
@@ -48,10 +48,15 @@ public class ChecksumArtifactReportProcessor
      * @plexus.requirement role-hint="md5"
      */
     private Digester md5Digester;
+    
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
 
     private static final String ROLE_HINT = "checksum";
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
@@ -68,11 +73,11 @@ public class ChecksumArtifactReportProcessor
 
         // TODO: make md5 configurable
 //        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
-        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
+        verifyChecksum( repository, path + ".sha1", file, sha1Digester, artifact );
     }
 
     private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
-                                 ReportingDatabase reporter, Artifact artifact )
+                                 Artifact artifact )
     {
         File checksumFile = new File( repository.getBasedir(), path );
         if ( checksumFile.exists() )
@@ -83,23 +88,23 @@ public class ChecksumArtifactReportProcessor
             }
             catch ( DigesterException e )
             {
-                addFailure( reporter, artifact, "checksum-wrong", e.getMessage() );
+                addFailure( artifact, "checksum-wrong", e.getMessage() );
             }
             catch ( IOException e )
             {
-                addFailure( reporter, artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
+                addFailure( artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
             }
         }
         else
         {
-            addFailure( reporter, artifact, "checksum-missing",
+            addFailure( artifact, "checksum-missing",
                         digester.getAlgorithm() + " checksum file does not exist." );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 }
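For context, a standalone sketch of the comparison the checksum processors perform: compute the digest of the repository file and compare it to the hash stored in the sibling .sha1 file. It is written against java.security.MessageDigest rather than the plexus Digester component used in the patch, and the class and method names are hypothetical.

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Sha1ChecksumCheck
{
    /**
     * Compare the SHA-1 digest of artifactFile with the hash stored in sha1File.
     */
    public static boolean matches( File artifactFile, File sha1File )
        throws IOException, NoSuchAlgorithmException
    {
        MessageDigest digest = MessageDigest.getInstance( "SHA-1" );

        InputStream in = new FileInputStream( artifactFile );
        try
        {
            byte[] buffer = new byte[8192];
            int read;
            while ( ( read = in.read( buffer ) ) >= 0 )
            {
                digest.update( buffer, 0, read );
            }
        }
        finally
        {
            in.close();
        }

        // Hex-encode the raw digest.
        StringBuffer hex = new StringBuffer();
        byte[] raw = digest.digest();
        for ( int i = 0; i < raw.length; i++ )
        {
            hex.append( Integer.toString( ( raw[i] & 0xff ) + 0x100, 16 ).substring( 1 ) );
        }

        // A .sha1 file may carry "<hash>  <filename>", so keep only the first token.
        BufferedReader reader = new BufferedReader( new FileReader( sha1File ) );
        String expected;
        try
        {
            expected = reader.readLine().trim().split( "\\s+" )[0];
        }
        finally
        {
            reader.close();
        }

        return expected.equalsIgnoreCase( hex.toString() );
    }
}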
index 1f0e860a797b27fa1cf35eeba3fe0f7bd544eb82..84313dc52fa936f2df8da6189bedc1dfccad9749 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
 import org.codehaus.plexus.digest.Digester;
@@ -48,32 +48,36 @@ public class ChecksumMetadataReportProcessor
      */
     private Digester md5Digester;
 
+    /**
+     * @plexus.requirement
+     */
+    private MetadataResultsDatabase database;
+
     private static final String ROLE_HINT = "checksum-metadata";
 
     /**
      * Validate the checksums of the metadata. Get the metadata file from the
      * repository then validate the checksum.
      */
-    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                 ReportingDatabase reporter )
+    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
     {
         if ( !"file".equals( repository.getProtocol() ) )
         {
             // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+            throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+                + "'. Only file based repositories are supported" );
         }
 
         //check if checksum files exist
         String path = repository.pathOfRemoteRepositoryMetadata( metadata );
         File file = new File( repository.getBasedir(), path );
 
-        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
-        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );
+        verifyChecksum( repository, path + ".md5", file, md5Digester, metadata );
+        verifyChecksum( repository, path + ".sha1", file, sha1Digester, metadata );
     }
 
     private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
-                                 ReportingDatabase reporter, RepositoryMetadata metadata )
+                                 RepositoryMetadata metadata )
     {
         File checksumFile = new File( repository.getBasedir(), path );
         if ( checksumFile.exists() )
@@ -84,25 +88,23 @@ public class ChecksumMetadataReportProcessor
             }
             catch ( DigesterException e )
             {
-                addFailure( reporter, metadata, "checksum-wrong", e.getMessage() );
+                addFailure( metadata, "checksum-wrong", e.getMessage() );
             }
             catch ( IOException e )
             {
-                addFailure( reporter, metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
+                addFailure( metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
             }
         }
         else
         {
-            addFailure( reporter, metadata, "checksum-missing",
-                        digester.getAlgorithm() + " checksum file does not exist." );
+            addFailure( metadata, "checksum-missing", digester.getAlgorithm() + " checksum file does not exist." );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
+    private void addFailure( RepositoryMetadata metadata, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+        database.addFailure( metadata, ROLE_HINT, problem, reason );
     }
 
 }
index 2d5bf80b4d305b7656f6705ae84d54bcb135494a..049767c2dfdfbff9a5169d01137ceea081422200 100644 (file)
@@ -21,7 +21,7 @@ package org.apache.maven.archiva.reporting.processor;
 
 import org.apache.maven.archiva.layer.RepositoryQueryLayer;
 import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
@@ -49,34 +49,39 @@ public class DependencyArtifactReportProcessor
      */
     private RepositoryQueryLayerFactory layerFactory;
 
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
     private static final String POM = "pom";
 
     private static final String ROLE_HINT = "dependency";
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( artifact.getRepository() );
         if ( !queryLayer.containsArtifact( artifact ) )
         {
             // TODO: is this even possible?
-            addFailure( reporter, artifact, "missing-artifact", "Artifact does not exist in the repository" );
+            addFailure( artifact, "missing-artifact", "Artifact does not exist in the repository" );
         }
 
         if ( model != null && POM.equals( artifact.getType() ) )
         {
             List dependencies = model.getDependencies();
-            processDependencies( dependencies, reporter, queryLayer, artifact );
+            processDependencies( dependencies, queryLayer, artifact );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 
-    private void processDependencies( List dependencies, ReportingDatabase reporter,
-                                      RepositoryQueryLayer repositoryQueryLayer, Artifact sourceArtifact )
+    private void processDependencies( List dependencies, RepositoryQueryLayer repositoryQueryLayer,
+                                      Artifact sourceArtifact )
     {
         if ( dependencies.size() > 0 )
         {
@@ -100,19 +105,19 @@ public class DependencyArtifactReportProcessor
 
                     if ( !repositoryQueryLayer.containsArtifact( artifact ) )
                     {
-                        String reason = MessageFormat.format(
-                            "Artifact''s dependency {0} does not exist in the repository",
-                            new String[]{getDependencyString( dependency )} );
-                        addFailure( reporter, sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ),
-                                    reason );
+                        String reason = MessageFormat
+                            .format( "Artifact''s dependency {0} does not exist in the repository",
+                                     new String[] { getDependencyString( dependency ) } );
+                        addFailure( sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ), reason );
                     }
                 }
                 catch ( InvalidVersionSpecificationException e )
                 {
                     String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}",
-                                                          new String[]{getDependencyString( dependency ),
-                                                              dependency.getVersion()} );
-                    addFailure( reporter, sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
+                                                          new String[] {
+                                                              getDependencyString( dependency ),
+                                                              dependency.getVersion() } );
+                    addFailure( sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
                 }
             }
         }
@@ -156,7 +161,7 @@ public class DependencyArtifactReportProcessor
         }
 
         return artifactFactory.createDependencyArtifact( dependency.getGroupId(), dependency.getArtifactId(), spec,
-                                                         dependency.getType(), dependency.getClassifier(),
-                                                         dependency.getScope() );
+                                                         dependency.getType(), dependency.getClassifier(), dependency
+                                                             .getScope() );
     }
 }
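Illustration only: the version-spec parsing that drives the "bad-version" failure in DependencyArtifactReportProcessor. The spec strings below are invented examples; the last one is deliberately malformed.

import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;

public class VersionSpecExample
{
    public static void main( String[] args )
    {
        String[] specs = { "1.0", "[1.0,2.0)", "[1.0," };

        for ( int i = 0; i < specs.length; i++ )
        {
            try
            {
                VersionRange range = VersionRange.createFromVersionSpec( specs[i] );
                System.out.println( specs[i] + " -> " + range );
            }
            catch ( InvalidVersionSpecificationException e )
            {
                // This is the case the processor records as "bad-version:<dependency key>".
                System.out.println( specs[i] + " -> invalid: " + e.getMessage() );
            }
        }
    }
}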
index 4c15eab455aecefcaaed7b8c8e7f8aee76821718..5dff8c50ef2de3cea7df8d5251090976e9a2723f 100644 (file)
@@ -27,7 +27,7 @@ import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
 import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
 import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
 import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.model.Model;
@@ -62,9 +62,14 @@ public class DuplicateArtifactFileReportProcessor
      */
     private String indexDirectory;
 
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
     private static final String ROLE_HINT = "duplicate";
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
         if ( artifact.getFile() != null )
@@ -82,16 +87,16 @@ public class DuplicateArtifactFileReportProcessor
             }
             catch ( DigesterException e )
             {
-                addWarning( reporter, artifact, null,
-                            "Unable to generate checksum for " + artifact.getFile() + ": " + e );
+                addWarning( artifact, null, "Unable to generate checksum for " + artifact.getFile() + ": " + e );
             }
 
             if ( checksum != null )
             {
                 try
                 {
-                    List results = index.search( new LuceneQuery(
-                        new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
+                    List results = index
+                        .search( new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum
+                            .toLowerCase() ) ) ) );
 
                     if ( !results.isEmpty() )
                     {
@@ -106,8 +111,7 @@ public class DuplicateArtifactFileReportProcessor
                                 String groupId = artifact.getGroupId();
                                 if ( groupId.equals( result.getGroupId() ) )
                                 {
-                                    addFailure( reporter, artifact, "duplicate",
-                                                "Found duplicate for " + artifact.getId() );
+                                    addFailure( artifact, "duplicate", "Found duplicate for " + artifact.getId() );
                                 }
                             }
                         }
@@ -115,25 +119,25 @@ public class DuplicateArtifactFileReportProcessor
                 }
                 catch ( RepositoryIndexSearchException e )
                 {
-                    addWarning( reporter, artifact, null, "Failed to search in index" + e );
+                    addWarning( artifact, null, "Failed to search in index" + e );
                 }
             }
         }
         else
         {
-            addWarning( reporter, artifact, null, "Artifact file is null" );
+            addWarning( artifact, null, "Artifact file is null" );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 
-    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addWarning( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addWarning( artifact, ROLE_HINT, problem, reason );
+        database.addWarning( artifact, ROLE_HINT, problem, reason );
     }
 }
index 1d4a6a64e681414b54eb8d98da45b21ae8790578..05977be8624b0872fa409ad2a93fee099fd4ab08 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.model.Model;
@@ -43,19 +43,24 @@ public class InvalidPomArtifactReportProcessor
 {
     private static final String ROLE_HINT = "invalid-pom";
 
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
     /**
      * @param artifact The pom xml file to be validated, passed as an artifact object.
      * @param reporter The artifact reporter object.
      */
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
         if ( !"file".equals( repository.getProtocol() ) )
         {
             // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+            throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+                + "'. Only file based repositories are supported" );
         }
 
         if ( "pom".equals( artifact.getType().toLowerCase() ) )
@@ -64,7 +69,7 @@ public class InvalidPomArtifactReportProcessor
 
             if ( !f.exists() )
             {
-                addFailure( reporter, artifact, "pom-missing", "POM not found." );
+                addFailure( artifact, "pom-missing", "POM not found." );
             }
             else
             {
@@ -79,13 +84,12 @@ public class InvalidPomArtifactReportProcessor
                 }
                 catch ( XmlPullParserException e )
                 {
-                    addFailure( reporter, artifact, "pom-parse-exception",
+                    addFailure( artifact, "pom-parse-exception",
                                 "The pom xml file is not well-formed. Error while parsing: " + e.getMessage() );
                 }
                 catch ( IOException e )
                 {
-                    addFailure( reporter, artifact, "pom-io-exception",
-                                "Error while reading the pom xml file: " + e.getMessage() );
+                    addFailure( artifact, "pom-io-exception", "Error while reading the pom xml file: " + e.getMessage() );
                 }
                 finally
                 {
@@ -95,9 +99,9 @@ public class InvalidPomArtifactReportProcessor
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 }
index cfd5944510f4544968c778a02441a59fd7ed6316..a8f5129b18cecc7bc5f694116e5755037100f454 100644 (file)
@@ -20,14 +20,13 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.artifact.handler.DefaultArtifactHandler;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.model.Model;
 import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.apache.maven.project.MavenProjectBuilder;
 import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
 
 import java.io.File;
@@ -57,13 +56,19 @@ public class LocationArtifactReportProcessor
     private ArtifactFactory artifactFactory;
 
     // TODO: share with other code with the same
-    private static final Set JAR_FILE_TYPES =
-        new HashSet( Arrays.asList( new String[]{"jar", "war", "par", "ejb", "ear", "rar", "sar"} ) );
+    private static final Set JAR_FILE_TYPES = new HashSet( Arrays.asList( new String[] {
+        "jar",
+        "war",
+        "par",
+        "ejb",
+        "ear",
+        "rar",
+        "sar" } ) );
 
     /**
      * @plexus.requirement
      */
-    private MavenProjectBuilder projectBuilder;
+    private ArtifactResultsDatabase database;
 
     private static final String POM = "pom";
 
@@ -77,15 +82,15 @@ public class LocationArtifactReportProcessor
      * location is valid based on the location specified in the pom. Check if the both the location
      * specified in the file system pom and in the pom included in the package is the same.
      */
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
         if ( !"file".equals( repository.getProtocol() ) )
         {
             // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+            throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+                + "'. Only file based repositories are supported" );
         }
 
         adjustDistributionArtifactHandler( artifact );
@@ -100,19 +105,16 @@ public class LocationArtifactReportProcessor
             {
                 //check if the artifact is located in its proper location based on the info
                 //specified in the model object/pom
-                Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(),
-                                                                                       model.getArtifactId(),
-                                                                                       model.getVersion(),
-                                                                                       artifact.getType(),
-                                                                                       artifact.getClassifier() );
+                Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(), model
+                    .getArtifactId(), model.getVersion(), artifact.getType(), artifact.getClassifier() );
 
                 adjustDistributionArtifactHandler( modelArtifact );
                 String modelPath = repository.pathOf( modelArtifact );
                 if ( !modelPath.equals( artifactPath ) )
                 {
-                    addFailure( reporter, artifact, "repository-pom-location",
-                                "The artifact is out of place. It does not match the specified location in the repository pom: " +
-                                    modelPath );
+                    addFailure( artifact, "repository-pom-location",
+                                "The artifact is out of place. It does not match the specified location in the repository pom: "
+                                    + modelPath );
                 }
             }
         }
@@ -126,7 +128,7 @@ public class LocationArtifactReportProcessor
             {
                 //unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
                 //check if the pom is included in the package
-                Model extractedModel = readArtifactModel( file, artifact, reporter );
+                Model extractedModel = readArtifactModel( file, artifact );
 
                 if ( extractedModel != null )
                 {
@@ -136,7 +138,7 @@ public class LocationArtifactReportProcessor
                                                                                       extractedModel.getPackaging() );
                     if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
                     {
-                        addFailure( reporter, artifact, "packaged-pom-location",
+                        addFailure( artifact, "packaged-pom-location",
                                     "The artifact is out of place. It does not match the specified location in the packaged pom." );
                     }
                 }
@@ -144,15 +146,14 @@ public class LocationArtifactReportProcessor
         }
         else
         {
-            addFailure( reporter, artifact, "missing-artifact",
-                        "The artifact file [" + file + "] cannot be found for metadata." );
+            addFailure( artifact, "missing-artifact", "The artifact file [" + file + "] cannot be found for metadata." );
         }
     }
 
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addFailure( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+        database.addFailure( artifact, ROLE_HINT, problem, reason );
     }
 
     private static void adjustDistributionArtifactHandler( Artifact artifact )
@@ -168,7 +169,7 @@ public class LocationArtifactReportProcessor
         }
     }
 
-    private Model readArtifactModel( File file, Artifact artifact, ReportingDatabase reporter )
+    private Model readArtifactModel( File file, Artifact artifact )
     {
         Model model = null;
 
@@ -178,8 +179,8 @@ public class LocationArtifactReportProcessor
             jar = new JarFile( file );
 
             //Get the entry and its input stream.
-            JarEntry entry = jar.getJarEntry(
-                "META-INF/maven/" + artifact.getGroupId() + "/" + artifact.getArtifactId() + "/pom.xml" );
+            JarEntry entry = jar.getJarEntry( "META-INF/maven/" + artifact.getGroupId() + "/"
+                + artifact.getArtifactId() + "/pom.xml" );
 
             // If the entry is not null, extract it.
             if ( entry != null )
@@ -198,11 +199,11 @@ public class LocationArtifactReportProcessor
         }
         catch ( IOException e )
         {
-            addWarning( reporter, artifact, "Unable to read artifact to extract model: " + e );
+            addWarning( artifact, "Unable to read artifact to extract model: " + e );
         }
         catch ( XmlPullParserException e )
         {
-            addWarning( reporter, artifact, "Unable to parse extracted model: " + e );
+            addWarning( artifact, "Unable to parse extracted model: " + e );
         }
         finally
         {
@@ -222,10 +223,10 @@ public class LocationArtifactReportProcessor
         return model;
     }
 
-    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String reason )
+    private void addWarning( Artifact artifact, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addWarning( artifact, ROLE_HINT, null, reason );
+        database.addWarning( artifact, ROLE_HINT, null, reason );
     }
 
     private Model readModel( InputStream entryStream )
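
The hunks above show the recurring refactor in this commit: the ReportingDatabase argument that used to be threaded through addFailure/addWarning is gone, and results are written to a Plexus-injected results database instead. A minimal sketch of the resulting processor shape, assuming the artifact processors implement an ArtifactReportProcessor interface with the two-argument processArtifact used below; the ExampleArtifactReportProcessor class and its role-hint are hypothetical stand-ins, not code from the patch:

    package org.apache.maven.archiva.reporting.processor;

    import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
    import org.apache.maven.artifact.Artifact;
    import org.apache.maven.model.Model;

    /**
     * @plexus.component role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor"
     *                   role-hint="example"
     */
    public class ExampleArtifactReportProcessor
        implements ArtifactReportProcessor
    {
        private static final String ROLE_HINT = "example";

        /**
         * @plexus.requirement
         */
        private ArtifactResultsDatabase database;

        public void processArtifact( Artifact artifact, Model model )
        {
            // Results go straight to the injected database; no ReportingDatabase
            // is passed in by the caller any more.
            database.addWarning( artifact, ROLE_HINT, null, "example warning" );
            database.addFailure( artifact, ROLE_HINT, "example-problem", "example failure reason" );
        }
    }
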
index dc6790abcf204902723d61bf17e3ce8d34c577f0..ad8465028b2017f04852bdaab8d11138e907d048 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
 
@@ -30,5 +29,5 @@ public interface MetadataReportProcessor
 {
     String ROLE = MetadataReportProcessor.class.getName();
 
-    void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ReportingDatabase reporter );
+    void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository );
 }
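
MetadataReportProcessor changes in the same way: implementations now report into a results database of their own rather than a ReportingDatabase handed in by the caller. A compact sketch, with the class name and role-hint hypothetical and the MetadataResultsDatabase.addFailure signature inferred from the test classes added further down:

    package org.apache.maven.archiva.reporting.processor;

    import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
    import org.apache.maven.artifact.repository.ArtifactRepository;
    import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;

    /**
     * @plexus.component role="org.apache.maven.archiva.reporting.processor.MetadataReportProcessor"
     *                   role-hint="example"
     */
    public class ExampleMetadataReportProcessor
        implements MetadataReportProcessor
    {
        private static final String ROLE_HINT = "example";

        /**
         * @plexus.requirement
         */
        private MetadataResultsDatabase database;

        public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
        {
            // Failures are recorded against the injected metadata results database.
            database.addFailure( metadata, ROLE_HINT, "example-problem", "example failure reason" );
        }
    }
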
index 21bd7b4dc68085e7a047dcd2f0d8614eb5d6fc8a..b5f0817cbbb8beaeeb81193679640591afe93ffd 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.handler.DefaultArtifactHandler;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -44,8 +44,15 @@ public class OldArtifactReportProcessor
      * @plexus.configuration default-value="31536000"
      */
     private int maxAge;
+    
+    /**
+     * TODO: Must create an 'Old Artifact' database.
+     * TODO: Base this off of an artifact table query instead.
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
 
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    public void processArtifact( Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
@@ -68,7 +75,7 @@ public class OldArtifactReportProcessor
             if ( System.currentTimeMillis() - file.lastModified() > maxAge * 1000 )
             {
                 // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-                reporter.addNotice( artifact, ROLE_HINT, "old-artifact",
+                database.addNotice( artifact, ROLE_HINT, "old-artifact",
                                     "The artifact is older than the maximum age of " + maxAge + " seconds." );
             }
         }
index 04720762dd2a5bed379a5077ad2a80ef1c518061..02c2cecb0169653c323646c2ca5d46244b3f2d4a 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.handler.DefaultArtifactHandler;
 import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -62,15 +62,22 @@ public class OldSnapshotArtifactReportProcessor
      */
     private int maxSnapshots;
 
-    public void processArtifact( final Artifact artifact, Model model, ReportingDatabase reporter )
+    /**
+     * TODO: Must create an 'Old Artifact' database.
+     * TODO: Base this off of an artifact table query instead.
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase database;
+
+    public void processArtifact( final Artifact artifact, Model model )
     {
         ArtifactRepository repository = artifact.getRepository();
 
         if ( !"file".equals( repository.getProtocol() ) )
         {
             // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+            throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+                + "'. Only file based repositories are supported" );
         }
 
         adjustDistributionArtifactHandler( artifact );
@@ -95,13 +102,14 @@ public class OldSnapshotArtifactReportProcessor
                     catch ( ParseException e )
                     {
                         throw new IllegalStateException(
-                            "Shouldn't match timestamp pattern and not be able to parse it: " + m.group( 2 ) );
+                                                         "Shouldn't match timestamp pattern and not be able to parse it: "
+                                                             + m.group( 2 ) );
                     }
 
                     if ( System.currentTimeMillis() - timestamp > maxAge * 1000 )
                     {
-                        addNotice( reporter, artifact, "snapshot-expired-time",
-                                   "The artifact is older than the maximum age of " + maxAge + " seconds." );
+                        addNotice( artifact, "snapshot-expired-time", "The artifact is older than the maximum age of "
+                            + maxAge + " seconds." );
                     }
                     else if ( maxSnapshots > 0 )
                     {
@@ -109,12 +117,12 @@ public class OldSnapshotArtifactReportProcessor
                         {
                             public boolean accept( File file, String string )
                             {
-                                return string.startsWith( artifact.getArtifactId() + "-" ) &&
-                                    string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
+                                return string.startsWith( artifact.getArtifactId() + "-" )
+                                    && string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
                             }
                         } );
 
-                        List/*<Integer>*/ buildNumbers = new ArrayList();
+                        List/*<Integer>*/buildNumbers = new ArrayList();
                         Integer currentBuild = null;
                         for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
                         {
@@ -149,7 +157,7 @@ public class OldSnapshotArtifactReportProcessor
 
                         if ( buildNumbers.contains( currentBuild ) )
                         {
-                            addNotice( reporter, artifact, "snapshot-expired-count",
+                            addNotice( artifact, "snapshot-expired-count",
                                        "The artifact is older than the maximum number of retained snapshot builds." );
                         }
                     }
@@ -162,10 +170,10 @@ public class OldSnapshotArtifactReportProcessor
         }
     }
 
-    private static void addNotice( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    private void addNotice( Artifact artifact, String problem, String reason )
     {
         // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addNotice( artifact, ROLE_HINT, problem, reason );
+        database.addNotice( artifact, ROLE_HINT, problem, reason );
     }
 
     private static void adjustDistributionArtifactHandler( Artifact artifact )
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java
deleted file mode 100644 (file)
index 0d3c813..0000000
+++ /dev/null
@@ -1,143 +0,0 @@
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Reader;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Writer;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Load and store the reports. No synchronization is used, but it is unnecessary as the old object
- * can continue to be used.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
- * @todo support other implementations than XML file
- * @plexus.component
- */
-public class DefaultReportingStore
-    extends AbstractLogEnabled
-    implements ReportingStore
-{
-    /**
-     * The cached reports for given repositories.
-     */
-    private Map/*<String,ReportingDatabase>*/ reports = new HashMap();
-
-    public ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException
-    {
-        String key = getKey( repository, reportGroup );
-        ReportingDatabase database = (ReportingDatabase) reports.get( key );
-
-        if ( database == null )
-        {
-            ReportingXpp3Reader reader = new ReportingXpp3Reader();
-
-            File file = getReportFilename( repository, reportGroup );
-
-            FileReader fileReader = null;
-            try
-            {
-                fileReader = new FileReader( file );
-            }
-            catch ( FileNotFoundException e )
-            {
-                database = new ReportingDatabase( reportGroup, repository );
-            }
-
-            if ( database == null )
-            {
-                getLogger().info( "Reading report database from " + file );
-                try
-                {
-                    Reporting reporting = reader.read( fileReader, false );
-                    database = new ReportingDatabase( reportGroup, reporting, repository );
-                }
-                catch ( IOException e )
-                {
-                    throw new ReportingStoreException( e.getMessage(), e );
-                }
-                catch ( XmlPullParserException e )
-                {
-                    throw new ReportingStoreException( e.getMessage(), e );
-                }
-                finally
-                {
-                    IOUtils.closeQuietly( fileReader );
-                }
-            }
-
-            reports.put( key, database );
-        }
-        return database;
-    }
-
-    private static String getKey( ArtifactRepository repository, ReportGroup reportGroup )
-    {
-        return repository.getId() + "/" + reportGroup.getFilename();
-    }
-
-    private static File getReportFilename( ArtifactRepository repository, ReportGroup reportGroup )
-    {
-        return new File( repository.getBasedir(), ".reports/" + reportGroup.getFilename() );
-    }
-
-    public void storeReports( ReportingDatabase database, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        database.updateTimings();
-
-        ReportingXpp3Writer writer = new ReportingXpp3Writer();
-
-        File file = getReportFilename( repository, database.getReportGroup() );
-        getLogger().info( "Writing reports to " + file );
-        FileWriter fileWriter = null;
-        try
-        {
-            file.getParentFile().mkdirs();
-
-            fileWriter = new FileWriter( file );
-            writer.write( fileWriter, database.getReporting() );
-        }
-        catch ( IOException e )
-        {
-            throw new ReportingStoreException( e.getMessage(), e );
-        }
-        finally
-        {
-            IOUtils.closeQuietly( fileWriter );
-        }
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java
deleted file mode 100644 (file)
index a7ead76..0000000
+++ /dev/null
@@ -1,60 +0,0 @@
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- * A component for loading the reporting database into the model.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo this is something that could possibly be generalised into Modello.
- */
-public interface ReportingStore
-{
-    /**
-     * The Plexus role for the component.
-     */
-    String ROLE = ReportingStore.class.getName();
-
-    /**
-     * Get the reports from the store. A cached version may be used.
-     *
-     * @param repository  the repository to load the reports for
-     * @param reportGroup the report group to get the report for
-     * @return the reporting database
-     * @throws ReportingStoreException if there was a problem reading the store
-     */
-    ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException;
-
-    /**
-     * Save the reporting to the store.
-     *
-     * @param database   the reports to store
-     * @param repository the repositorry to store the reports in
-     * @throws ReportingStoreException if there was a problem writing the store
-     */
-    void storeReports( ReportingDatabase database, ArtifactRepository repository )
-        throws ReportingStoreException;
-
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java
deleted file mode 100644 (file)
index e3f0880..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Exception occurring using the reporting store.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ReportingStoreException
-    extends Exception
-{
-    public ReportingStoreException( String message )
-    {
-        super( message );
-    }
-
-    public ReportingStoreException( String message, Throwable e )
-    {
-        super( message, e );
-    }
-}
index 915faaa31b6a414e5d6a9599ea73b35e983b56d2..8b41251ca2c3582e76e5fc87cd04f1c53d9d8a1a 100644 (file)
@@ -1,3 +1,4 @@
+<?xml version="1.0" ?>
 <!--
   ~ Licensed to the Apache Software Foundation (ASF) under one
   ~ or more contributor license agreements.  See the NOTICE file
@@ -30,7 +31,7 @@
     </default>
   </defaults>
   <classes>
-    <class rootElement="true" xml.tagName="reporting">
+    <class rootElement="true" xml.tagName="reporting" stash.storable="false">
       <name>Reporting</name>
       <version>1.0.0</version>
       <fields>
             <multiplicity>*</multiplicity>
           </association>
         </field>
-        <field xml.attribute="true">
-          <name>lastModified</name>
-          <version>1.0.0</version>
-          <type>long</type>
-        </field>
-        <field xml.attribute="true">
-          <name>executionTime</name>
-          <version>1.0.0</version>
-          <type>long</type>
-        </field>
       </fields>
     </class>
-    <class>
+    <class stash.storable="true" 
+           jpox.use-identifiers-as-primary-key="true"
+           jpox.identity-type="application"
+           jpox.identity-class="ArtifactResultsKey">
       <name>ArtifactResults</name>
       <version>1.0.0</version>
       <fields>
-        <field>
-          <name>failures</name>
-          <version>1.0.0</version>
-          <association>
-            <type>Result</type>
-            <multiplicity>*</multiplicity>
-          </association>
-        </field>
-        <field>
-          <name>warnings</name>
-          <version>1.0.0</version>
-          <association>
-            <type>Result</type>
-            <multiplicity>*</multiplicity>
-          </association>
-        </field>
-        <field>
-          <name>notices</name>
-          <version>1.0.0</version>
-          <association>
-            <type>Result</type>
-            <multiplicity>*</multiplicity>
-          </association>
-        </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true" 
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>groupId</name>
           <identity>true</identity>
           <version>1.0.0</version>
             The group ID of the artifact in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>artifactId</name>
           <version>1.0.0</version>
           <identity>true</identity>
             The artifact ID of the artifact in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>version</name>
           <version>1.0.0</version>
           <identity>true</identity>
             The version of the artifact in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>type</name>
           <version>1.0.0</version>
           <type>String</type>
             The type of the artifact in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.nullValue="none"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>classifier</name>
           <version>1.0.0</version>
           <type>String</type>
             The classifier of the artifact in the result.
           </description>
         </field>
-      </fields>
-    </class>
-    <class>
-      <name>MetadataResults</name>
-      <version>1.0.0</version>
-      <fields>
         <field>
           <name>failures</name>
           <version>1.0.0</version>
-          <association>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
             <type>Result</type>
             <multiplicity>*</multiplicity>
           </association>
         <field>
           <name>warnings</name>
           <version>1.0.0</version>
-          <association>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
             <type>Result</type>
             <multiplicity>*</multiplicity>
           </association>
         <field>
           <name>notices</name>
           <version>1.0.0</version>
-          <association>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
             <type>Result</type>
             <multiplicity>*</multiplicity>
           </association>
         </field>
-        <field xml.attribute="true">
+      </fields>
+    </class>
+    <class stash.storable="true" 
+           jpox.use-identifiers-as-primary-key="true"
+           jpox.identity-type="application"
+           jpox.identity-class="MetadataResultsKey">
+      <name>MetadataResults</name>
+      <version>1.0.0</version>
+      <fields>
+        <field xml.attribute="true"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>groupId</name>
           <version>1.0.0</version>
           <type>String</type>
             The group ID of the metadata in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.nullValue="none"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>artifactId</name>
           <version>1.0.0</version>
           <type>String</type>
             The artifact ID of the metadata in the result.
           </description>
         </field>
-        <field xml.attribute="true">
+        <field xml.attribute="true"
+               jpox.nullValue="none"
+               jpox.primary-key="true" 
+               jpox.value-strategy="off" 
+               jpox.persistence-modifier="persistent">
           <name>version</name>
           <version>1.0.0</version>
           <type>String</type>
             The version of the metadata in the result.
           </description>
         </field>
+        <field>
+          <name>failures</name>
+          <version>1.0.0</version>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
+            <type>Result</type>
+            <multiplicity>*</multiplicity>
+          </association>
+        </field>
+        <field>
+          <name>warnings</name>
+          <version>1.0.0</version>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
+            <type>Result</type>
+            <multiplicity>*</multiplicity>
+          </association>
+        </field>
+        <field>
+          <name>notices</name>
+          <version>1.0.0</version>
+          <association stash.part="true"
+                       jpox.join="true"
+                       java.init="field"
+                       jpox.dependent="true"
+                       java.generate-break="false"
+                       java.generate-create="false">
+            <type>Result</type>
+            <multiplicity>*</multiplicity>
+          </association>
+        </field>
         <field xml.attribute="true">
           <name>lastModified</name>
           <version>1.0.0</version>
         </field>
       </fields>
     </class>
-    <class>
+    <class stash.storable="true">
       <name>Result</name>
       <version>1.0.0</version>
       <fields>
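
The jpox.* and stash.* attributes added above mark ArtifactResults, MetadataResults and Result as JDO-persistable, with the artifact and metadata coordinates forming a composite application-identity key instead of the old in-memory XML tree. A rough sketch of what a lookup against the persisted model then amounts to; this is plain JDO/JDOQL against the generated ArtifactResults class, not the actual ArtifactResultsDatabase implementation, and the PersistenceManager is assumed to come from the JdoFactory configured in the test setup below:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    import javax.jdo.PersistenceManager;
    import javax.jdo.Query;

    import org.apache.maven.archiva.reporting.model.ArtifactResults;

    public class ArtifactResultsQueries
    {
        /**
         * Roughly what a findArtifactResults( groupId, artifactId, version ) style
         * lookup has to do once the model is stored through JPOX.
         */
        public List findArtifactResults( PersistenceManager pm, String groupId, String artifactId, String version )
        {
            Query query = pm.newQuery( ArtifactResults.class );
            query.declareParameters( "String findGroupId, String findArtifactId, String findVersion" );
            query.setFilter( "this.groupId == findGroupId && this.artifactId == findArtifactId"
                + " && this.version == findVersion" );

            Collection results = (Collection) query.execute( groupId, artifactId, version );

            return new ArrayList( results );
        }
    }
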
index 4b94710830eee06e0457f710cf49fbe70a480243..a0d6894ba54df3a101d69f6b7b69cfe9e8f5cee3 100644 (file)
@@ -25,8 +25,18 @@ import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
 import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
 import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.jpox.SchemaTool;
 
 import java.io.File;
+import java.net.URL;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
 
 /**
  *
@@ -50,6 +60,8 @@ public abstract class AbstractRepositoryReportsTestCase
     {
         super.setUp();
 
+        setupJdoFactory();
+
         File repositoryDirectory = getTestFile( "src/test/repository" );
 
         factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
@@ -60,14 +72,57 @@ public abstract class AbstractRepositoryReportsTestCase
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
     }
 
-    protected Artifact createArtifactFromRepository( File repository, String groupId, String artifactId,
-                                                     String version )
+    protected void setupJdoFactory()
+        throws Exception
+    {
+        DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
+
+        jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" ); //$NON-NLS-1$
+
+        jdoFactory.setDriverName( "org.hsqldb.jdbcDriver" ); //$NON-NLS-1$
+
+        jdoFactory.setUrl( "jdbc:hsqldb:mem:" + getName() ); //$NON-NLS-1$
+
+        jdoFactory.setUserName( "sa" ); //$NON-NLS-1$
+
+        jdoFactory.setPassword( "" ); //$NON-NLS-1$
+
+        jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_UNCOMMITTED" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+        jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_UNCOMMITTED" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+        jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+        jdoFactory.setProperty( "javax.jdo.PersistenceManagerFactoryClass", "org.jpox.PersistenceManagerFactoryImpl" );
+
+        Properties properties = jdoFactory.getProperties();
+
+        for ( Iterator it = properties.entrySet().iterator(); it.hasNext(); )
+        {
+            Map.Entry entry = (Map.Entry) it.next();
+
+            System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
+        }
+
+        SchemaTool.createSchemaTables( new URL[] { getClass()
+            .getResource( "/org/apache/maven/archiva/reporting/model/package.jdo" ) }, new URL[] {}, null, false, null ); //$NON-NLS-1$
+
+        PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
+
+        assertNotNull( pmf );
+
+        PersistenceManager pm = pmf.getPersistenceManager();
+
+        pm.close();
+    }
+
+    protected Artifact createArtifactFromRepository( File repository, String groupId, String artifactId, String version )
         throws Exception
     {
         Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, "jar" );
 
-        artifact.setRepository(
-            factory.createArtifactRepository( "repository", repository.toURL().toString(), layout, null, null ) );
+        artifact.setRepository( factory.createArtifactRepository( "repository", repository.toURL().toString(), layout,
+                                                                  null, null ) );
 
         artifact.isSnapshot();
 
@@ -90,8 +145,8 @@ public abstract class AbstractRepositoryReportsTestCase
     protected Artifact createArtifactWithClassifier( String groupId, String artifactId, String version, String type,
                                                      String classifier )
     {
-        Artifact artifact =
-            artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
+        Artifact artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+                                                                          classifier );
         artifact.setRepository( repository );
         return artifact;
     }
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/AllTests.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/AllTests.java
new file mode 100644 (file)
index 0000000..7894f3b
--- /dev/null
@@ -0,0 +1,45 @@
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - Used to aid in IDE-based development.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.reporting.*" );
+        //$JUnit-BEGIN$
+        suite.addTest( org.apache.maven.archiva.reporting.database.AllTests.suite() );
+        suite.addTest( org.apache.maven.archiva.reporting.processor.AllTests.suite() );
+        suite.addTest( org.apache.maven.archiva.reporting.reporter.AllTests.suite() );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/AllTests.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/AllTests.java
new file mode 100644 (file)
index 0000000..3e33146
--- /dev/null
@@ -0,0 +1,20 @@
+package org.apache.maven.archiva.reporting.database;
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.reporting.database" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( ArtifactResultsDatabaseTest.class );
+        suite.addTestSuite( MetadataResultsDatabaseTest.class );
+        suite.addTestSuite( ReportingDatabaseTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabaseTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabaseTest.java
new file mode 100644 (file)
index 0000000..7b816c6
--- /dev/null
@@ -0,0 +1,171 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+import java.util.List;
+
+/**
+ * ArtifactResultsDatabaseTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArtifactResultsDatabaseTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private Artifact artifact;
+
+    private String processor, problem, reason;
+
+    private ArtifactResultsDatabase database;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+
+        artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope", "type",
+                                        "classifier", null );
+        processor = "processor";
+        problem = "problem";
+        reason = "reason";
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        release( database );
+
+        super.tearDown();
+    }
+
+    public void testAddNoticeArtifactStringStringString()
+    {
+        database.addNotice( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, artifactResults.getNotices().size() );
+
+        database.addNotice( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, artifactResults.getNotices().size() );
+    }
+
+    public void testAddWarningArtifactStringStringString()
+    {
+        database.addWarning( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, artifactResults.getWarnings().size() );
+
+        database.addWarning( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, artifactResults.getWarnings().size() );
+    }
+
+    public void testAddFailureArtifactStringStringString()
+    {
+        database.addFailure( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, artifactResults.getFailures().size() );
+
+        database.addFailure( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, artifactResults.getFailures().size() );
+    }
+
+    public void testFindArtifactResults()
+    {
+        String groupId = "org.test.group";
+
+        Artifact bar = createArtifact( "org.bar", "bar", "2.0" );
+        Artifact foo = createArtifact( groupId, "foo", "1.0" );
+        Artifact fooSources = createArtifactWithClassifier( groupId, "foo", "1.0", "jar", "sources" );
+        Artifact fooJavadoc = createArtifactWithClassifier( groupId, "foo", "1.0", "jar", "javadoc" );
+
+        database.addFailure( bar, processor, problem, "A reason that should not be found." );
+
+        String testprocessor = "test-processor";
+        String testproblem = "test-problem";
+
+        database.addFailure( foo, testprocessor, testproblem, "Test Reason on main jar." );
+        database.addFailure( foo, testprocessor, testproblem, "Someone mistook this for an actual reason." );
+        database.addWarning( foo, testprocessor, testproblem, "Congrats you have a test reason." );
+
+        database.addFailure( fooSources, testprocessor, testproblem, "Sources do not seem to match classes." );
+        database.addWarning( fooJavadoc, testprocessor, testproblem, "Javadoc content makes no sense." );
+
+        ArtifactResults artifactResults = database.getArtifactResults( foo );
+
+        assertEquals( 4, database.getNumFailures() );
+        assertEquals( 2, artifactResults.getFailures().size() );
+
+        List hits = database.findArtifactResults( groupId, "foo", "1.0" );
+        assertNotNull( hits );
+
+//        for ( Iterator it = hits.iterator(); it.hasNext(); )
+//        {
+//            ArtifactResults result = (ArtifactResults) it.next();
+//            System.out.println( " result: " + result.getGroupId() + ":" + result.getArtifactId() + ":"
+//                + result.getVersion() + ":" + result.getClassifier() + ":" + result.getType() );
+//
+//            for ( Iterator itmsgs = result.getFailures().iterator(); itmsgs.hasNext(); )
+//            {
+//                Result res = (Result) itmsgs.next();
+//                String msg = (String) res.getReason();
+//                System.out.println( "    failure: " + msg );
+//            }
+//
+//            for ( Iterator itmsgs = result.getWarnings().iterator(); itmsgs.hasNext(); )
+//            {
+//                Result res = (Result) itmsgs.next();
+//                String msg = (String) res.getReason();
+//                System.out.println( "    warning: " + msg );
+//            }
+//
+//            for ( Iterator itmsgs = result.getNotices().iterator(); itmsgs.hasNext(); )
+//            {
+//                Result res = (Result) itmsgs.next();
+//                String msg = (String) res.getReason();
+//                System.out.println( "    notice: " + msg );
+//            }
+//        }
+
+        assertEquals( "Should find 3 artifacts", 3, hits.size() ); // 3 artifacts
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabaseTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabaseTest.java
new file mode 100644 (file)
index 0000000..acee253
--- /dev/null
@@ -0,0 +1,113 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * MetadataResultsDatabaseTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MetadataResultsDatabaseTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private MetadataResultsDatabase database;
+
+    private RepositoryMetadata metadata;
+
+    private String processor, problem, reason;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
+
+        Artifact artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope",
+                                                 "type", "classifier", null );
+        metadata = new ArtifactRepositoryMetadata( artifact );
+
+        processor = "processor";
+        problem = "problem";
+        reason = "reason";
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        release( database );
+
+        super.tearDown();
+    }
+
+    public void testAddNoticeRepositoryMetadataStringStringString()
+    {
+        database.addNotice( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, metadataResults.getNotices().size() );
+
+        database.addNotice( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, metadataResults.getNotices().size() );
+    }
+
+    public void testAddWarningRepositoryMetadataStringStringString()
+    {
+        database.addWarning( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, metadataResults.getWarnings().size() );
+
+        database.addWarning( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, metadataResults.getWarnings().size() );
+    }
+
+    public void testAddFailureRepositoryMetadataStringStringString()
+    {
+        database.addFailure( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, metadataResults.getFailures().size() );
+
+        database.addFailure( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, metadataResults.getFailures().size() );
+    }
+}
index af2d45149a1a353d368dda81ed2b930b662091ee..e03d59c4fa27a2e129a80d80384c1c1014c38717 100644 (file)
@@ -19,14 +19,7 @@ package org.apache.maven.archiva.reporting.database;
  * under the License.
  */
 
-import junit.framework.TestCase;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
 
 /**
  * Test for {@link ReportingDatabase}.
@@ -35,117 +28,28 @@ import org.apache.maven.artifact.versioning.VersionRange;
  * @version $Id$
  */
 public class ReportingDatabaseTest
-    extends TestCase
+    extends AbstractRepositoryReportsTestCase
 {
-    private Artifact artifact;
-
-    private String processor, problem, reason;
-
-    private ReportingDatabase reportingDatabase;
-
-    private RepositoryMetadata metadata;
+    private ReportingDatabase database;
 
     protected void setUp()
         throws Exception
     {
         super.setUp();
-        artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope", "type",
-                                        "classifier", null );
-        processor = "processor";
-        problem = "problem";
-        reason = "reason";
-        reportingDatabase = new ReportingDatabase( null );
-
-        metadata = new ArtifactRepositoryMetadata( artifact );
-    }
-
-    public void testAddNoticeArtifactStringStringString()
-    {
-        reportingDatabase.addNotice( artifact, processor, problem, reason );
-        ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumNotices() );
-        assertEquals( 1, artifactResults.getNotices().size() );
-
-        reportingDatabase.addNotice( artifact, processor, problem, reason );
-        artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumNotices() );
-        assertEquals( 1, artifactResults.getNotices().size() );
+        database = (ReportingDatabase) lookup( ReportingDatabase.ROLE );
     }
 
-    public void testAddWarningArtifactStringStringString()
-    {
-        reportingDatabase.addWarning( artifact, processor, problem, reason );
-        ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumWarnings() );
-        assertEquals( 1, artifactResults.getWarnings().size() );
-
-        reportingDatabase.addWarning( artifact, processor, problem, reason );
-        artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumWarnings() );
-        assertEquals( 1, artifactResults.getWarnings().size() );
-    }
-
-    public void testAddFailureArtifactStringStringString()
-    {
-        reportingDatabase.addFailure( artifact, processor, problem, reason );
-        ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 1, artifactResults.getFailures().size() );
-
-        reportingDatabase.addFailure( artifact, processor, problem, reason );
-        artifactResults = reportingDatabase.getArtifactResults( artifact );
-
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 1, artifactResults.getFailures().size() );
-    }
-
-    public void testAddNoticeRepositoryMetadataStringStringString()
-    {
-        reportingDatabase.addNotice( metadata, processor, problem, reason );
-        MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumNotices() );
-        assertEquals( 1, metadataResults.getNotices().size() );
-
-        reportingDatabase.addNotice( metadata, processor, problem, reason );
-        metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumNotices() );
-        assertEquals( 1, metadataResults.getNotices().size() );
-    }
-
-    public void testAddWarningRepositoryMetadataStringStringString()
+    protected void tearDown()
+        throws Exception
     {
-        reportingDatabase.addWarning( metadata, processor, problem, reason );
-        MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumWarnings() );
-        assertEquals( 1, metadataResults.getWarnings().size() );
-
-        reportingDatabase.addWarning( metadata, processor, problem, reason );
-        metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumWarnings() );
-        assertEquals( 1, metadataResults.getWarnings().size() );
+        release( database );
+        super.tearDown();
     }
 
-    public void testAddFailureRepositoryMetadataStringStringString()
+    public void testLookup()
     {
-        reportingDatabase.addFailure( metadata, processor, problem, reason );
-        MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 1, metadataResults.getFailures().size() );
-
-        reportingDatabase.addFailure( metadata, processor, problem, reason );
-        metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 1, metadataResults.getFailures().size() );
+        assertNotNull( "database should not be null.", database );
+        assertNotNull( "database.artifactDatabase should not be null.", database.getArtifactDatabase() );
+        assertNotNull( "database.metadataDatabase should not be null.", database.getMetadataDatabase() );
     }
 }
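
Per the rewritten test, ReportingDatabase is no longer constructed per repository and report group; it is looked up as a Plexus component and, judging by the assertions above, exposes the two new results databases through getArtifactDatabase() and getMetadataDatabase(). A hedged usage sketch; the accessor return types and the recording calls are assumed to match the ArtifactResultsDatabase and MetadataResultsDatabase methods exercised in the other new tests:

    import org.apache.maven.archiva.reporting.database.ReportingDatabase;
    import org.apache.maven.artifact.Artifact;
    import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
    import org.codehaus.plexus.PlexusContainer;

    public class ReportingDatabaseUsage
    {
        public void record( PlexusContainer container, Artifact artifact, RepositoryMetadata metadata )
            throws Exception
        {
            // One shared component instead of a ReportingDatabase created per repository/report group.
            ReportingDatabase reporting = (ReportingDatabase) container.lookup( ReportingDatabase.ROLE );

            reporting.getArtifactDatabase().addFailure( artifact, "example-processor", "example-problem",
                                                        "example failure reason" );
            reporting.getMetadataDatabase().addWarning( metadata, "example-processor", "example-problem",
                                                        "example warning reason" );
        }
    }
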
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/AllTests.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/AllTests.java
new file mode 100644 (file)
index 0000000..953cb4c
--- /dev/null
@@ -0,0 +1,24 @@
+package org.apache.maven.archiva.reporting.processor;
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.reporting.processor" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( LocationArtifactReportProcessorTest.class );
+        suite.addTestSuite( DuplicateArtifactFileReportProcessorTest.class );
+        suite.addTestSuite( OldSnapshotArtifactReportProcessorTest.class );
+        suite.addTestSuite( DependencyArtifactReportProcessorTest.class );
+        suite.addTestSuite( OldArtifactReportProcessorTest.class );
+        suite.addTestSuite( InvalidPomArtifactReportProcessorTest.class );
+        suite.addTestSuite( BadMetadataReportProcessorTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
index ad33b0d2f6ea12daab4f083ee6a596b8888831d9..feebb72d01faa87b06b43d41277aeb2ff1741469 100644 (file)
@@ -19,9 +19,9 @@ package org.apache.maven.archiva.reporting.processor;
  * under the License.
  */
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
 import org.apache.maven.archiva.reporting.model.MetadataResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -36,6 +36,11 @@ import org.apache.maven.artifact.repository.metadata.Versioning;
 
 import java.util.Iterator;
 
+/**
+ * BadMetadataReportProcessorTest 
+ *
+ * @version $Id$
+ */
 public class BadMetadataReportProcessorTest
     extends AbstractRepositoryReportsTestCase
 {
@@ -43,7 +48,7 @@ public class BadMetadataReportProcessorTest
 
     private MetadataReportProcessor badMetadataReportProcessor;
 
-    private ReportingDatabase reportingDatabase;
+    private MetadataResultsDatabase database;
 
     protected void setUp()
         throws Exception
@@ -52,12 +57,19 @@ public class BadMetadataReportProcessorTest
 
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
 
-        badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
+        database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
 
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
+        badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
     }
 
+    protected void tearDown()
+        throws Exception
+    {
+        release( artifactFactory );
+        release( badMetadataReportProcessor );
+        super.tearDown();
+    }
+    
     public void testMetadataMissingLastUpdated()
     {
         Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
@@ -68,9 +80,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -87,9 +99,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, null );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -106,41 +118,41 @@ public class BadMetadataReportProcessorTest
         if ( alpha1First )
         {
             assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
-                          result.getReason() );
+                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.", result
+                              .getReason() );
         }
         else
         {
             assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                          result.getReason() );
+                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+                              .getReason() );
         }
         result = (Result) failures.next();
         if ( !alpha1First )
         {
             assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
-                          result.getReason() );
+                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.", result
+                              .getReason() );
         }
         else
         {
             assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                          result.getReason() );
+                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+                              .getReason() );
         }
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
     public void testSnapshotMetadataMissingVersioning()
     {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+                                                                 "1.0-alpha-1-SNAPSHOT", "type" );
 
         RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -162,9 +174,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertFalse( "check there are no failures", failures.hasNext() );
     }
 
@@ -178,9 +190,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -189,8 +201,8 @@ public class BadMetadataReportProcessorTest
         assertMetadata( metadata, results );
         // TODO: should be more robust
         assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                      result.getReason() );
+                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+                          .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -206,9 +218,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -217,8 +229,8 @@ public class BadMetadataReportProcessorTest
         assertMetadata( metadata, results );
         // TODO: should be more robust
         assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
-                      result.getReason() );
+                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.", result
+                          .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -233,9 +245,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -244,14 +256,14 @@ public class BadMetadataReportProcessorTest
         assertMetadata( metadata, results );
         // TODO: should be more robust
         assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
-                      result.getReason() );
+                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.", result
+                          .getReason() );
         assertTrue( "check there is a 2nd failure", failures.hasNext() );
         result = (Result) failures.next();
         // TODO: should be more robust
         assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                      result.getReason() );
+                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+                          .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -261,9 +273,9 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertFalse( "check there are no failures", failures.hasNext() );
     }
 
@@ -274,17 +286,16 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "missing-plugin", "default3" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         Result result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository",
-                      result.getReason() );
+        assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository", result.getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -293,18 +304,17 @@ public class BadMetadataReportProcessorTest
         RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         Result result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason",
-                      "Plugin snapshot-artifact is present in the repository but " + "missing in the metadata.",
-                      result.getReason() );
+        assertEquals( "check reason", "Plugin snapshot-artifact is present in the repository but "
+            + "missing in the metadata.", result.getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -316,22 +326,22 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( null, "default3" ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "", "default4" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         Result result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3",
-                      result.getReason() );
+        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3", result
+            .getReason() );
         assertTrue( "check there is a 2nd failure", failures.hasNext() );
         result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4",
-                      result.getReason() );
+        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4", result
+            .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -341,9 +351,9 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -354,8 +364,8 @@ public class BadMetadataReportProcessorTest
         assertTrue( "check there is a 2nd failure", failures.hasNext() );
         result = (Result) failures.next();
         // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.",
-                      result.getReason() );
+        assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.", result
+            .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
@@ -365,9 +375,9 @@ public class BadMetadataReportProcessorTest
         metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
         metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default" ) );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -380,8 +390,8 @@ public class BadMetadataReportProcessorTest
 
     public void testValidSnapshotMetadata()
     {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+                                                                 "1.0-alpha-1-SNAPSHOT", "type" );
 
         Snapshot snapshot = new Snapshot();
         snapshot.setBuildNumber( 1 );
@@ -389,16 +399,16 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertFalse( "check there are no failures", failures.hasNext() );
     }
 
     public void testInvalidSnapshotMetadata()
     {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+                                                                 "1.0-alpha-1-SNAPSHOT", "type" );
 
         Snapshot snapshot = new Snapshot();
         snapshot.setBuildNumber( 2 );
@@ -406,9 +416,9 @@ public class BadMetadataReportProcessorTest
 
         RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
 
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+        badMetadataReportProcessor.processMetadata( metadata, repository );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -416,16 +426,21 @@ public class BadMetadataReportProcessorTest
         Result result = (Result) failures.next();
         assertMetadata( metadata, results );
         // TODO: should be more robust
-        assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.",
-                      result.getReason() );
+        assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.", result
+            .getReason() );
         assertFalse( "check no more failures", failures.hasNext() );
     }
 
     private static void assertMetadata( RepositoryMetadata metadata, MetadataResults results )
     {
-        assertEquals( "check metadata", metadata.getGroupId(), results.getGroupId() );
-        assertEquals( "check metadata", metadata.getArtifactId(), results.getArtifactId() );
-        assertEquals( "check metadata", metadata.getBaseVersion(), results.getVersion() );
+        /* StringUtils.defaultString() is used here because of database constraints.
+         * The MetadataResults object has a composite primary key consisting of groupId, artifactId, and version.
+         * This means that none of those fields may be null.  However, that does not rule out
+         * an empty string being stored in place of a null.
+         */
+        assertEquals( "check metadata", StringUtils.defaultString( metadata.getGroupId() ), results.getGroupId() );
+        assertEquals( "check metadata", StringUtils.defaultString( metadata.getArtifactId() ), results.getArtifactId() );
+        assertEquals( "check metadata", StringUtils.defaultString( metadata.getBaseVersion() ), results.getVersion() );
     }
 
     private Plugin createMetadataPlugin( String artifactId, String prefix )
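
(For reference, and not part of the commit itself: a minimal sketch of the commons-lang behaviour that the assertMetadata() comment above relies on. StringUtils.defaultString() maps null to the empty string and passes non-null values through unchanged, which is why the composite key columns of MetadataResults never receive a null. The class name DefaultStringSketch is hypothetical.)

import org.apache.commons.lang.StringUtils;

public class DefaultStringSketch
{
    public static void main( String[] args )
    {
        // null collapses to the empty string, satisfying the not-null key constraint
        System.out.println( "'" + StringUtils.defaultString( null ) + "'" );        // prints ''
        // non-null values are returned unchanged
        System.out.println( "'" + StringUtils.defaultString( "groupId" ) + "'" );   // prints 'groupId'
    }
}
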
index bf1ee803ccae35fcfe617f4377b1a325a216dd69..afbb2037a9c59308985eec68db5130f9dd552bf9 100644 (file)
@@ -20,8 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -43,7 +42,7 @@ public class DependencyArtifactReportProcessorTest
 
     private static final String VALID_VERSION = "1.0-alpha-1";
 
-    private ReportingDatabase reportingDatabase;
+    private ArtifactResultsDatabase database;
 
     private Model model;
 
@@ -58,27 +57,24 @@ public class DependencyArtifactReportProcessorTest
     {
         super.setUp();
         model = new Model();
-        processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
-
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+        processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
     }
 
     public void testArtifactFoundButNoDirectDependencies()
     {
         Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     private Artifact createValidArtifact()
     {
-        Artifact projectArtifact =
-            artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
+        Artifact projectArtifact = artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID,
+                                                                          VALID_VERSION );
         projectArtifact.setRepository( repository );
         return projectArtifact;
     }
@@ -87,11 +83,11 @@ public class DependencyArtifactReportProcessorTest
     {
         Artifact artifact = artifactFactory.createProjectArtifact( INVALID, INVALID, INVALID );
         artifact.setRepository( repository );
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -106,10 +102,10 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createValidDependency();
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     private Dependency createValidDependency()
@@ -124,10 +120,10 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createValidDependency();
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     public void testValidArtifactWithValidMultipleDependencies()
@@ -140,10 +136,10 @@ public class DependencyArtifactReportProcessorTest
         model.addDependency( dependency );
 
         Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     public void testValidArtifactWithAnInvalidDependency()
@@ -156,18 +152,17 @@ public class DependencyArtifactReportProcessorTest
         model.addDependency( createDependency( INVALID, INVALID, INVALID ) );
 
         Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
         Result result = (Result) failures.next();
-        assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ),
-                      result.getReason() );
+        assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ), result.getReason() );
     }
 
     public void testValidArtifactWithInvalidDependencyGroupId()
@@ -177,12 +172,12 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( INVALID, VALID_ARTIFACT_ID, VALID_VERSION );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -206,12 +201,12 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, INVALID, VALID_VERSION );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -226,11 +221,11 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, INVALID );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -245,12 +240,12 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[" );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
@@ -265,10 +260,10 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[1.0,)" );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
     }
 
     public void testValidArtifactWithMissingDependencyVersion()
@@ -278,12 +273,12 @@ public class DependencyArtifactReportProcessorTest
         Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, null );
         model.addDependency( dependency );
 
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
+        processor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getIterator();
         ArtifactResults results = (ArtifactResults) failures.next();
         assertFalse( failures.hasNext() );
         failures = results.getFailures().iterator();
index dbbb3bd8ce5236f27b11412fc996aff0231e8286..6ab299a5c4c8a28a1e654b5810cd527471d517d6 100644 (file)
@@ -24,8 +24,7 @@ import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
 import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
 import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
 import org.apache.maven.model.Model;
@@ -49,7 +48,7 @@ public class DuplicateArtifactFileReportProcessorTest
 
     File indexDirectory;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     protected void setUp()
         throws Exception
@@ -60,8 +59,11 @@ public class DuplicateArtifactFileReportProcessorTest
         FileUtils.deleteDirectory( indexDirectory );
 
         artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+        
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+        
         artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "jar" );
-        System.out.println( "artifact = " + artifact );
+        
         model = new Model();
 
         RepositoryArtifactIndexFactory factory =
@@ -75,9 +77,6 @@ public class DuplicateArtifactFileReportProcessorTest
         index.indexRecords( Collections.singletonList( recordFactory.createRecord( artifact ) ) );
 
         processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "duplicate" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
     }
 
     public void testNullArtifactFile()
@@ -85,32 +84,32 @@ public class DuplicateArtifactFileReportProcessorTest
     {
         artifact.setFile( null );
 
-        processor.processArtifact( artifact, model, reportDatabase );
+        processor.processArtifact( artifact, model );
 
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 1, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check warnings", 1, database.getNumWarnings() );
+        assertEquals( "Check no failures", 0, database.getNumFailures() );
     }
 
     public void testSuccessOnAlreadyIndexedArtifact()
         throws Exception
     {
-        processor.processArtifact( artifact, model, reportDatabase );
+        processor.processArtifact( artifact, model );
 
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check warnings", 0, database.getNumWarnings() );
+        assertEquals( "Check no failures", 0, database.getNumFailures() );
     }
 
     public void testSuccessOnDifferentGroupId()
         throws Exception
     {
         artifact.setGroupId( "different.groupId" );
-        processor.processArtifact( artifact, model, reportDatabase );
+        processor.processArtifact( artifact, model );
 
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check warnings", 0, database.getNumWarnings() );
+        assertEquals( "Check no failures", 0, database.getNumFailures() );
     }
 
     public void testSuccessOnNewArtifact()
@@ -118,11 +117,11 @@ public class DuplicateArtifactFileReportProcessorTest
     {
         Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
 
-        processor.processArtifact( newArtifact, model, reportDatabase );
+        processor.processArtifact( newArtifact, model );
 
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check warnings", 0, database.getNumWarnings() );
+        assertEquals( "Check no failures", 0, database.getNumFailures() );
     }
 
     public void testFailure()
@@ -132,11 +131,11 @@ public class DuplicateArtifactFileReportProcessorTest
                                              artifact.getVersion(), artifact.getType() );
         duplicate.setFile( artifact.getFile() );
 
-        processor.processArtifact( duplicate, model, reportDatabase );
+        processor.processArtifact( duplicate, model );
 
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check no failures", 1, reportDatabase.getNumFailures() );
+        assertEquals( "Check warnings", 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
+        assertEquals( "Check no failures", 1, database.getNumFailures() );
     }
 
     private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
index 049ae48d54576fb692da74e4cff9ae8e1718824c..f37bd20ac03064f9f789e56fca6f51ab9946188b 100644 (file)
@@ -20,8 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 
 /**
@@ -32,16 +31,14 @@ public class InvalidPomArtifactReportProcessorTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
         artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "invalid-pom" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
     }
 
     /**
@@ -51,8 +48,8 @@ public class InvalidPomArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "org.apache.maven", "artifactId", "1.0-alpha-3", "pom" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, database.getNumFailures() );
     }
 
 
@@ -63,10 +60,10 @@ public class InvalidPomArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
 
@@ -77,9 +74,9 @@ public class InvalidPomArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "jar" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 }
index 9eb59b3a7ca5b31c675b8bd6b16287727a1b4015..134295cb9abbebbd78083222a54f27137f3a3b71 100644 (file)
@@ -20,8 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.model.Model;
 import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
@@ -40,16 +39,14 @@ public class LocationArtifactReportProcessorTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
         artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "artifact-location" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
     }
 
     /**
@@ -61,10 +58,10 @@ public class LocationArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -78,10 +75,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -94,10 +91,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( pomArtifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( pomArtifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -111,10 +108,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -129,10 +126,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -147,10 +144,10 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -164,9 +161,9 @@ public class LocationArtifactReportProcessorTest
         Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
 
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+        artifactReportProcessor.processArtifact( artifact, model );
 
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        assertEquals( 1, database.getNumFailures() );
     }
 
     /**
@@ -180,8 +177,8 @@ public class LocationArtifactReportProcessorTest
 
         Artifact pomArtifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0", "pom" );
         Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        artifactReportProcessor.processArtifact( artifact, model );
+        assertEquals( 1, database.getNumFailures() );
     }
 
     private Model readPom( String path )
@@ -210,8 +207,8 @@ public class LocationArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "org.apache.maven", "maven-monitor", "2.1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, database.getNumFailures() );
     }
 
     /**
@@ -223,8 +220,8 @@ public class LocationArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "org.apache.maven", "maven-project", "2.1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, database.getNumFailures() );
     }
 
 }
index 117a25fd1d22639ddf783d5a76c257d7c9b87935..325ba6a34970e794251914c4a7ce53b0787100ee 100644 (file)
@@ -20,8 +20,7 @@ package org.apache.maven.archiva.reporting.processor;
  */
 
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -38,27 +37,25 @@ public class OldArtifactReportProcessorTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
         artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-artifact" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
-        reportDatabase = new ReportingDatabase( reportGroup );
     }
 
     public void testOldArtifact()
     {
         Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
-        ArtifactResults results = (ArtifactResults) reportDatabase.getArtifactIterator().next();
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check notices", 1, database.getNumNotices() );
+        ArtifactResults results = (ArtifactResults) database.getIterator().next();
         assertEquals( artifact.getArtifactId(), results.getArtifactId() );
         assertEquals( artifact.getGroupId(), results.getGroupId() );
         assertEquals( artifact.getVersion(), results.getVersion() );
@@ -78,10 +75,10 @@ public class OldArtifactReportProcessorTest
 
         Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     public void testMissingArtifact()
@@ -91,7 +88,7 @@ public class OldArtifactReportProcessorTest
 
         try
         {
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+            artifactReportProcessor.processArtifact( artifact, null );
             fail( "Should not have passed" );
         }
         catch ( IllegalStateException e )
index b09732ff5b8d563a86ab0c60907f6cf85c60ad0b..680373ea33db85272e73a69833121b09510c9e7f 100644 (file)
@@ -21,8 +21,7 @@ package org.apache.maven.archiva.reporting.processor;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.Result;
 import org.apache.maven.artifact.Artifact;
@@ -40,7 +39,7 @@ public class OldSnapshotArtifactReportProcessorTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportDatabase;
+    private ArtifactResultsDatabase database;
 
     private File tempRepository;
 
@@ -48,11 +47,10 @@ public class OldSnapshotArtifactReportProcessorTest
         throws Exception
     {
         super.setUp();
-        artifactReportProcessor =
-            (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-snapshot-artifact" );
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE,
+                                                                    "old-snapshot-artifact" );
 
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
-        reportDatabase = new ReportingDatabase( reportGroup );
         tempRepository = getTestFile( "target/test-repository" );
         FileUtils.deleteDirectory( tempRepository );
     }
@@ -61,11 +59,11 @@ public class OldSnapshotArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-20050611.202024-1", "pom" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
-        Iterator artifactIterator = reportDatabase.getArtifactIterator();
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check notices", 1, database.getNumNotices() );
+        Iterator artifactIterator = database.getIterator();
         assertArtifactResults( artifactIterator, artifact );
     }
 
@@ -86,20 +84,20 @@ public class OldSnapshotArtifactReportProcessorTest
     {
         Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "pom" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     public void testNonSnapshotArtifact()
     {
         Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     public void testNewSnapshotArtifact()
@@ -113,13 +111,13 @@ public class OldSnapshotArtifactReportProcessorTest
         String date = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).format( new Date() );
         FileUtils.writeStringToFile( new File( dir, "artifactId-1.0-alpha-1-" + date + "-1.jar" ), "foo", null );
 
-        Artifact artifact =
-            createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date + "-1" );
+        Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date
+            + "-1" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check no notices", 0, database.getNumNotices() );
     }
 
     public void testTooManySnapshotArtifact()
@@ -137,15 +135,15 @@ public class OldSnapshotArtifactReportProcessorTest
 
         for ( int i = 1; i <= 5; i++ )
         {
-            Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId",
-                                                              "1.0-alpha-1-" + date + "-" + i );
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+            Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId", "1.0-alpha-1-"
+                + date + "-" + i );
+            artifactReportProcessor.processArtifact( artifact, null );
         }
 
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 3, reportDatabase.getNumNotices() );
-        Iterator artifactIterator = reportDatabase.getArtifactIterator();
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "Check notices", 3, database.getNumNotices() );
+        Iterator artifactIterator = database.getIterator();
         for ( int i = 1; i <= 3; i++ )
         {
             String version = "1.0-alpha-1-" + date + "-" + i;
@@ -161,7 +159,7 @@ public class OldSnapshotArtifactReportProcessorTest
 
         try
         {
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+            artifactReportProcessor.processArtifact( artifact, null );
             fail( "Should not have passed" );
         }
         catch ( IllegalStateException e )
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/AllTests.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/AllTests.java
new file mode 100644 (file)
index 0000000..b9bffef
--- /dev/null
@@ -0,0 +1,45 @@
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.reporting.reporter" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( DefaultArtifactReporterTest.class );
+        suite.addTestSuite( ChecksumMetadataReporterTest.class );
+        suite.addTestSuite( ChecksumArtifactReporterTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
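
For reference, a JUnit 3 suite like the one above can be driven by the stock text-mode runner; the launcher class below is purely illustrative (only AllTests.suite() and junit.textui.TestRunner come from the file above and the JUnit 3 API):

    import junit.framework.Test;
    import junit.textui.TestRunner;

    // Hypothetical launcher: runs the reporter suite defined in AllTests above
    // and prints the pass/fail summary to stdout.
    public class RunReporterTests
    {
        public static void main( String[] args )
        {
            Test suite = org.apache.maven.archiva.reporting.reporter.AllTests.suite();
            TestRunner.run( suite );
        }
    }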
index 810ff1fc369e8a7cff46e5ec462085e9356f0fd3..3fc9acd0b73dfd6aca89c3e6ae35ff3d8acbb328 100644 (file)
@@ -19,21 +19,12 @@ package org.apache.maven.archiva.reporting.reporter;
  * under the License.
  */
 
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
-import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
 import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
 import org.codehaus.plexus.digest.DigesterException;
 
-import java.io.File;
 import java.io.IOException;
-import java.util.Iterator;
 
 /**
  * This class tests the ChecksumArtifactReportProcessor.
@@ -44,19 +35,14 @@ public class ChecksumArtifactReporterTest
 {
     private ArtifactReportProcessor artifactReportProcessor;
 
-    private ReportingDatabase reportingDatabase;
-
-    private MetadataReportProcessor metadataReportProcessor;
+    private ArtifactResultsDatabase database;
 
     public void setUp()
         throws Exception
     {
         super.setUp();
         artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
-        metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
     }
 
     /**
@@ -70,10 +56,10 @@ public class ChecksumArtifactReporterTest
 
         Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 0, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
     }
 
     /**
@@ -85,79 +71,9 @@ public class ChecksumArtifactReporterTest
         String s1 = "1.0";
         Artifact artifact = createArtifact( "checksumTest", s, s1 );
 
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the valid checksum of a metadata file.
-     * The reportingDatabase should report 2 success validation.
-     */
-    public void testChecksumMetadataReporterSuccess()
-        throws DigesterException, IOException
-    {
-        createMetadataFile( "VALID" );
-        createMetadataFile( "INVALID" );
-
-        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
-        //Version level metadata
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        //Artifact level metadata
-        metadata = new ArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        //Group level metadata
-        metadata = new GroupRepositoryMetadata( "checksumTest" );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-    }
-
-    /**
-     * Test the corrupted checksum of a metadata file.
-     * The reportingDatabase must report 2 failures.
-     */
-    public void testChecksumMetadataReporterFailure()
-    {
-        Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-    }
-
-    /**
-     * Test the conditional when the checksum files of the artifact & metadata do not exist.
-     */
-    public void testChecksumFilesDoNotExist()
-        throws DigesterException, IOException
-    {
-        createChecksumFile( "VALID" );
-        createMetadataFile( "VALID" );
-        deleteChecksumFiles( "jar" );
-
-        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-
-        deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
     }
 }
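
The reworked tests above all follow the same pattern: the report processor and the results database are looked up as separate Plexus components, processArtifact() is called without a database argument, and the counts are read back from ArtifactResultsDatabase. A condensed sketch of that flow, using only calls that appear in this diff (the PlexusTestCase-derived test class and its createArtifact() helper are assumed):

    // Inside a PlexusTestCase-derived test, as in the classes above:
    ArtifactReportProcessor processor =
        (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
    ArtifactResultsDatabase database =
        (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );

    Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );

    // The processor now records results straight into the looked-up database.
    processor.processArtifact( artifact, null );

    assertEquals( 0, database.getNumFailures() );
    assertEquals( 0, database.getNumWarnings() );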
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumMetadataReporterTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumMetadataReporterTest.java
new file mode 100644 (file)
index 0000000..6cd3583
--- /dev/null
@@ -0,0 +1,135 @@
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.codehaus.plexus.digest.DigesterException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * ChecksumMetadataReporterTest 
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ChecksumMetadataReporterTest
+    extends AbstractChecksumArtifactReporterTestCase
+{
+    private ArtifactReportProcessor artifactReportProcessor;
+
+    private MetadataReportProcessor metadataReportProcessor;
+
+    private MetadataResultsDatabase database;
+
+    private ArtifactResultsDatabase artifactsDatabase;
+
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+        metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
+        database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
+        artifactsDatabase = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+    }
+
+    /**
+     * Test the valid checksum of a metadata file.
+     * The database should report 2 successful validations.
+     */
+    public void testChecksumMetadataReporterSuccess()
+        throws DigesterException, IOException
+    {
+        createMetadataFile( "VALID" );
+        createMetadataFile( "INVALID" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        //Version level metadata
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        //Artifact level metadata
+        metadata = new ArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        //Group level metadata
+        metadata = new GroupRepositoryMetadata( "checksumTest" );
+        metadataReportProcessor.processMetadata( metadata, repository );
+    }
+
+    /**
+     * Test the corrupted checksum of a metadata file.
+     * The database must report 2 failures.
+     */
+    public void testChecksumMetadataReporterFailure()
+    {
+        Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        Iterator failures = database.getIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+    }
+
+    /**
+     * Test the case where the checksum files of the artifact & metadata do not exist.
+     */
+    public void testChecksumFilesDoNotExist()
+        throws DigesterException, IOException
+    {
+        createChecksumFile( "VALID" );
+        createMetadataFile( "VALID" );
+        deleteChecksumFiles( "jar" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, artifactsDatabase.getNumFailures() );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        Iterator failures = database.getIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+
+        deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+    }
+
+}
index 2be9571e10af4aed68f0ed46d4fb11c2291fbdfd..5c4c426149670d788e08e5b1f2e27f5066ec866f 100644 (file)
@@ -19,9 +19,9 @@ package org.apache.maven.archiva.reporting.reporter;
  * under the License.
  */
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
 import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
 import org.apache.maven.archiva.reporting.model.ArtifactResults;
 import org.apache.maven.archiva.reporting.model.MetadataResults;
 import org.apache.maven.archiva.reporting.model.Result;
@@ -34,12 +34,14 @@ import org.apache.maven.artifact.repository.metadata.Versioning;
 import java.util.Iterator;
 
 /**
+ * DefaultArtifactReporterTest 
  *
+ * @version $Id$
  */
 public class DefaultArtifactReporterTest
     extends AbstractRepositoryReportsTestCase
 {
-    private ReportingDatabase reportingDatabase;
+    private ReportingDatabase database;
 
     private RepositoryMetadata metadata;
 
@@ -49,23 +51,41 @@ public class DefaultArtifactReporterTest
 
     private Artifact artifact;
 
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        database = (ReportingDatabase) lookup( ReportingDatabase.ROLE );
+
+        ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+        artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        Versioning versioning = new Versioning();
+        versioning.addVersion( "1.0-alpha-1" );
+        versioning.addVersion( "1.0-alpha-2" );
+
+        metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+    }
+
     public void testEmptyArtifactReporter()
     {
-        assertEquals( "No failures", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "No warnings", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-        assertFalse( "No artifact failures", reportingDatabase.getArtifactIterator().hasNext() );
-        assertFalse( "No metadata failures", reportingDatabase.getMetadataIterator().hasNext() );
+        assertEquals( "No failures", 0, database.getNumFailures() );
+        assertEquals( "No warnings", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
+        assertFalse( "No artifact failures", database.getArtifactIterator().hasNext() );
+        assertFalse( "No metadata failures", database.getMetadataIterator().hasNext() );
     }
 
     public void testMetadataSingleFailure()
     {
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
-        assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
+        assertEquals( "failures count", 1, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getMetadataIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -80,20 +100,28 @@ public class DefaultArtifactReporterTest
 
     private void assertMetadata( MetadataResults result )
     {
-        assertEquals( "check failure cause", metadata.getGroupId(), result.getGroupId() );
-        assertEquals( "check failure cause", metadata.getArtifactId(), result.getArtifactId() );
-        assertEquals( "check failure cause", metadata.getBaseVersion(), result.getVersion() );
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version.
+         * This also means that none of those fields may be null. However, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        assertEquals( "check failure cause", StringUtils.defaultString( metadata.getGroupId() ), result.getGroupId() );
+        assertEquals( "check failure cause", StringUtils.defaultString( metadata.getArtifactId() ), result
+            .getArtifactId() );
+        assertEquals( "check failure cause", StringUtils.defaultString( metadata.getBaseVersion() ), result
+            .getVersion() );
     }
 
     public void testMetadataMultipleFailures()
     {
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
-        assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
+        database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
+        assertEquals( "failures count", 2, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getMetadataIterator();
+        Iterator failures = database.getMetadataIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         MetadataResults results = (MetadataResults) failures.next();
         failures = results.getFailures().iterator();
@@ -113,12 +141,12 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataSingleWarning()
     {
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 1, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getMetadataIterator();
+        Iterator warnings = database.getMetadataIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         MetadataResults results = (MetadataResults) warnings.next();
         warnings = results.getWarnings().iterator();
@@ -133,13 +161,13 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataMultipleWarnings()
     {
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
+        database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 2, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getMetadataIterator();
+        Iterator warnings = database.getMetadataIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         MetadataResults results = (MetadataResults) warnings.next();
         warnings = results.getWarnings().iterator();
@@ -159,12 +187,12 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataSingleNotice()
     {
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
-        assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
+        assertEquals( "failure count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check notices", 1, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getMetadataIterator();
+        Iterator warnings = database.getMetadataIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         MetadataResults results = (MetadataResults) warnings.next();
         warnings = results.getNotices().iterator();
@@ -179,13 +207,13 @@ public class DefaultArtifactReporterTest
 
     public void testMetadataMultipleNotices()
     {
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
+        database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
+        database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 2, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getMetadataIterator();
+        Iterator warnings = database.getMetadataIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         MetadataResults results = (MetadataResults) warnings.next();
         warnings = results.getNotices().iterator();
@@ -205,12 +233,12 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactSingleFailure()
     {
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
-        assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
+        assertEquals( "failures count", 1, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getArtifactIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         ArtifactResults results = (ArtifactResults) failures.next();
         failures = results.getFailures().iterator();
@@ -225,22 +253,31 @@ public class DefaultArtifactReporterTest
 
     private void assertArtifact( ArtifactResults results )
     {
-        assertEquals( "check failure cause", artifact.getGroupId(), results.getGroupId() );
-        assertEquals( "check failure cause", artifact.getArtifactId(), results.getArtifactId() );
-        assertEquals( "check failure cause", artifact.getVersion(), results.getVersion() );
-        assertEquals( "check failure cause", artifact.getClassifier(), results.getClassifier() );
-        assertEquals( "check failure cause", artifact.getType(), results.getType() );
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version,
+         * type, classifier.
+         * This also means that none of those fields may be null. However, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        assertEquals( "check failure cause", StringUtils.defaultString( artifact.getGroupId() ), results.getGroupId() );
+        assertEquals( "check failure cause", StringUtils.defaultString( artifact.getArtifactId() ), results
+            .getArtifactId() );
+        assertEquals( "check failure cause", StringUtils.defaultString( artifact.getVersion() ), results.getVersion() );
+        assertEquals( "check failure cause", StringUtils.defaultString( artifact.getClassifier() ), results
+            .getClassifier() );
+        assertEquals( "check failure cause", StringUtils.defaultString( artifact.getType() ), results.getType() );
     }
 
     public void testArtifactMultipleFailures()
     {
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
-        assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
+        database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
+        assertEquals( "failures count", 2, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator failures = reportingDatabase.getArtifactIterator();
+        Iterator failures = database.getArtifactIterator();
         assertTrue( "check there is a failure", failures.hasNext() );
         ArtifactResults results = (ArtifactResults) failures.next();
         failures = results.getFailures().iterator();
@@ -260,12 +297,12 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactSingleWarning()
     {
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 1, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getArtifactIterator();
+        Iterator warnings = database.getArtifactIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         ArtifactResults results = (ArtifactResults) warnings.next();
         warnings = results.getWarnings().iterator();
@@ -280,13 +317,13 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactMultipleWarnings()
     {
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
+        database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 2, database.getNumWarnings() );
+        assertEquals( "check no notices", 0, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getArtifactIterator();
+        Iterator warnings = database.getArtifactIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         ArtifactResults results = (ArtifactResults) warnings.next();
         warnings = results.getWarnings().iterator();
@@ -306,12 +343,12 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactSingleNotice()
     {
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
-        assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
+        assertEquals( "failure count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check notices", 1, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getArtifactIterator();
+        Iterator warnings = database.getArtifactIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         ArtifactResults results = (ArtifactResults) warnings.next();
         warnings = results.getNotices().iterator();
@@ -326,13 +363,13 @@ public class DefaultArtifactReporterTest
 
     public void testArtifactMultipleNotices()
     {
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
+        database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
+        database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
+        assertEquals( "warnings count", 0, database.getNumFailures() );
+        assertEquals( "warnings count", 0, database.getNumWarnings() );
+        assertEquals( "check no notices", 2, database.getNumNotices() );
 
-        Iterator warnings = reportingDatabase.getArtifactIterator();
+        Iterator warnings = database.getArtifactIterator();
         assertTrue( "check there is a failure", warnings.hasNext() );
         ArtifactResults results = (ArtifactResults) warnings.next();
         warnings = results.getNotices().iterator();
@@ -350,22 +387,4 @@ public class DefaultArtifactReporterTest
         assertFalse( "no more warnings", warnings.hasNext() );
     }
 
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        Versioning versioning = new Versioning();
-        versioning.addVersion( "1.0-alpha-1" );
-        versioning.addVersion( "1.0-alpha-2" );
-
-        metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
-    }
 }
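
The StringUtils.defaultString() calls in assertMetadata()/assertArtifact() above compensate for the database constraint described in the comments: the key columns of the results records cannot be null, so a null classifier (or group/artifact/version) on the Maven side comes back as an empty string on the results side. A tiny standalone illustration of that behaviour (commons-lang API as imported in the diff; the values are made up):

    import org.apache.commons.lang.StringUtils;

    public class DefaultStringDemo
    {
        public static void main( String[] args )
        {
            String classifier = null;   // a main artifact typically has no classifier
            String stored = "";         // the results record stores "" instead of null

            // defaultString(null) yields "", so the two compare equal
            System.out.println( StringUtils.defaultString( classifier ).equals( stored ) ); // true
        }
    }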
diff --git a/archiva-reports-standard/src/test/resources/META-INF/plexus/components.xml b/archiva-reports-standard/src/test/resources/META-INF/plexus/components.xml
new file mode 100644 (file)
index 0000000..6e27005
--- /dev/null
@@ -0,0 +1,9 @@
+<component-set>
+  <components>
+    <component>
+      <role>org.codehaus.plexus.jdo.JdoFactory</role>
+      <role-hint>archiva</role-hint>
+      <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+    </component>
+  </components>
+</component-set>
diff --git a/archiva-reports-standard/src/test/resources/log4j.properties b/archiva-reports-standard/src/test/resources/log4j.properties
new file mode 100644 (file)
index 0000000..9b2c26a
--- /dev/null
@@ -0,0 +1,10 @@
+# Set root logger level to WARN and its only appender to A1.
+log4j.rootLogger=WARN, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+
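
This properties file configures log4j 1.x for the test run; any class that obtains a logger through the standard API is routed to the console appender defined above. A minimal, generic usage sketch (the class name is hypothetical, the Logger API is standard log4j 1.x):

    import org.apache.log4j.Logger;

    public class LoggingDemo
    {
        private static final Logger log = Logger.getLogger( LoggingDemo.class );

        public static void main( String[] args )
        {
            // Below the WARN root level, so suppressed by the configuration above.
            log.info( "not shown" );
            // At or above WARN, so printed through the A1 ConsoleAppender.
            log.warn( "printed via A1" );
        }
    }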
index 0d9da15daf6cf3dbfb6b3a46f412055c62cd5bd0..5ae0123925e47ad27efc7375d09084f8e6361d99 100644 (file)
         <requirement>
           <role>org.codehaus.plexus.digest.Digester</role>
           <role-hint>md5</role-hint>
+          <field-name>digester</field-name>
         </requirement>
         <requirement>
           <role>org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory</role>
+          <field-name>indexFactory</field-name>
+        </requirement>
+        <requirement>
+          <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+          <field-name>database</field-name>
         </requirement>
       </requirements>
       <configuration>
index 64be4813ea86fb431def5ae86327dec3b192096e..016361e9be517e3068b80f3a9ab51bb93d60a99a 100644 (file)
       <role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
       <role-hint>old-artifact</role-hint>
       <implementation>org.apache.maven.archiva.reporting.processor.OldArtifactReportProcessor</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+          <field-name>database</field-name>
+        </requirement>
+      </requirements>
       <configuration>
         <maxAge>10</maxAge>
       </configuration>
index 64acabcf2a572251b78f20f235f8756da9b25007..779aa8c243c9dba9bc78f50a757948bbf15f8880 100644 (file)
       <role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
       <role-hint>old-snapshot-artifact</role-hint>
       <implementation>org.apache.maven.archiva.reporting.processor.OldSnapshotArtifactReportProcessor</implementation>
+      <requirements>
+        <requirement>
+          <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+          <field-name>database</field-name>
+        </requirement>
+      </requirements>
       <configuration>
         <maxAge>3600</maxAge>
         <maxSnapshots>2</maxSnapshots>
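
The new <requirement>/<field-name> entries in these descriptors tell Plexus which private field of the processor implementation receives the ArtifactResultsDatabase at lookup time. A schematic sketch of the expected injection target (only the class name, role, field name, and configuration keys come from the descriptor; the skeleton itself is illustrative and omits the processor methods):

    import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;

    // Schematic only: the field matching <field-name>database</field-name>.
    public class OldSnapshotArtifactReportProcessor
    {
        /**
         * @plexus.requirement
         */
        private ArtifactResultsDatabase database;   // set by the container at lookup time

        // populated from the <configuration> block of the descriptor
        private int maxAge;

        private int maxSnapshots;
    }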
index 78cbd1282ad780d7e3ea505e9e7e7b8f7de951a4..c0064555329e0f4cb734d2ff3804fc93cc144b42 100644 (file)
       <artifactId>plexus-component-api</artifactId>
     </dependency>
   </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.plexus</groupId>
+        <artifactId>plexus-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>merge</id>
+            <goals>
+              <goal>merge-descriptors</goal>
+            </goals>
+            <configuration>
+              <descriptors>
+                <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+                <descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
+              </descriptors>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
 </project>
diff --git a/archiva-site/src/site/resources/images/graph-multimodule.dot b/archiva-site/src/site/resources/images/graph-multimodule.dot
new file mode 100644 (file)
index 0000000..759dab3
--- /dev/null
@@ -0,0 +1,239 @@
+// Auto generated dot file from plexus-graphing-graphviz.
+digraph gid {
+
+  // Graph Defaults
+  graph [
+    rankdir="TB"
+  ];
+
+  // Node Defaults.
+  node [
+    fontname="Helvetica",
+    fontsize="8",
+    shape="box"
+  ];
+
+  // Edge Defaults.
+  edge [
+    arrowsize="0.8"
+    fontsize="8",
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_REPOSITORY_LAYER10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-repository-layer\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" [
+    label="org.apache.maven.archiva\narchiva-webapp\n1.0-SNAPSHOT\nwar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_APPLICATION10_SNAPSHOTPLEXUS_APPLICATION" [
+    label="org.apache.maven.archiva\narchiva-plexus-application\n1.0-SNAPSHOT\nplexus-application",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-core\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CONFIGURATION10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-configuration\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_PROXY10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-proxy\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-reports-standard\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CLI10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-cli\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CONVERTER10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-converter\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_APPLET10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-applet\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_RUNTIME10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-plexus-runtime\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-discoverer\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_SECURITY10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-security\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Node
+  "ORGAPACHEMAVENARCHIVAARCHIVA_INDEXER10_SNAPSHOTJAR" [
+    label="org.apache.maven.archiva\narchiva-indexer\n1.0-SNAPSHOT\njar",
+    fontsize="8",
+    shape=box
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CONVERTER10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_APPLET10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_APPLICATION10_SNAPSHOTPLEXUS_APPLICATION" -> "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_INDEXER10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CONFIGURATION10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_RUNTIME10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_APPLICATION10_SNAPSHOTPLEXUS_APPLICATION" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CLI10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_PROXY10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_INDEXER10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_REPOSITORY_LAYER10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_PROXY10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_PROXY10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CONFIGURATION10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CONVERTER10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+
+  // Edge
+  "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_SECURITY10_SNAPSHOTJAR" [
+    arrowtail=none,
+    arrowhead=normal
+  ];
+}
diff --git a/archiva-site/src/site/resources/images/graph-multimodule.png b/archiva-site/src/site/resources/images/graph-multimodule.png
new file mode 100644 (file)
index 0000000..a7450d1
Binary files /dev/null and b/archiva-site/src/site/resources/images/graph-multimodule.png differ
index b6353ed0f610d751c77a68e6b6bea00dc9ee8ad2..c9d02499a4ef6e8613e32beb7d6579587556eecb 100644 (file)
     </dependency>
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
-      <artifactId>plexus-log4j-logging</artifactId>
-      <version>1.1-alpha-2</version>
+      <artifactId>plexus-slf4j-logging</artifactId>
+      <version>1.1-alpha-1-SNAPSHOT</version>
       <scope>runtime</scope>
     </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-log4j12</artifactId>
+      <version>1.2</version>
+    </dependency>
     <dependency>
       <groupId>commons-logging</groupId>
       <artifactId>commons-logging</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-indexer</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-discoverer</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-configuration</artifactId>
+      <artifactId>archiva-proxy</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-proxy</artifactId>
+      <artifactId>archiva-core</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
-      <artifactId>archiva-core</artifactId>
+      <artifactId>archiva-common</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.maven.archiva</groupId>
     <dependency>
       <groupId>org.apache.derby</groupId>
       <artifactId>derby</artifactId>
+      <version>10.1.3.1</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
index 7e260ebfe71bb628e3d8dd22b0c81a9c7371f458..bd1d76c14aeea34f8eb8de4dfc7645e30c9e05e1 100644 (file)
 <!DOCTYPE Configure PUBLIC "-//Mort Bay Consulting//DTD Configure//EN" "http://jetty.mortbay.org/configure.dtd">
 <Configure class="org.mortbay.jetty.webapp.WebAppContext">
 
-<New id="validation_mail" class="org.mortbay.jetty.plus.naming.Resource">
-  <Arg>mail/Session</Arg>
-  <Arg>
-    <New class="org.mortbay.naming.factories.MailSessionReference">
-      <Set name="user"></Set>
-      <Set name="password"></Set>
-      <Set name="properties">
-        <New class="java.util.Properties">
-          <Put name="mail.smtp.host">localhost</Put>
-          <Put name="mail.smtp.port">25</Put> <!-- TODO: shouldn't this default? -->
-        </New>
-      </Set>
-    </New>
-  </Arg>
-</New>
+  <New id="validation_mail" class="org.mortbay.jetty.plus.naming.Resource">
+    <Arg>mail/Session</Arg>
+    <Arg>
+      <New class="org.mortbay.naming.factories.MailSessionReference">
+        <Set name="user"></Set>
+        <Set name="password"></Set>
+        <Set name="properties">
+          <New class="java.util.Properties">
+            <Put name="mail.smtp.host">localhost</Put>
+            <Put name="mail.smtp.port">25</Put> <!-- TODO: shouldn't this default? -->
+          </New>
+        </Set>
+      </New>
+    </Arg>
+  </New>
 
+  <!-- Archiva database -->
+  <New id="archiva" class="org.mortbay.jetty.plus.naming.Resource">
+    <Arg>jdbc/archiva</Arg>
+    <Arg>
+      <New class="org.apache.derby.jdbc.EmbeddedDataSource">
+        <Set name="DatabaseName">target/databases/archiva</Set>
+        <Set name="user">sa</Set>
+        <Set name="createDatabase">create</Set>
+      </New>
+    </Arg>
+  </New>
+
+  <New id="archivaShutdown" class="org.mortbay.jetty.plus.naming.Resource">
+    <Arg>jdbc/archivaShutdown</Arg>
+    <Arg>
+      <New class="org.apache.derby.jdbc.EmbeddedDataSource">
+        <Set name="DatabaseName">target/databases/archiva</Set>
+        <Set name="user">sa</Set>
+        <Set name="shutdownDatabase">shutdown</Set>
+      </New>
+    </Arg>
+  </New>
+
+  <!-- Users / Security Database -->
   <New id="users" class="org.mortbay.jetty.plus.naming.Resource">
     <Arg>jdbc/users</Arg>
     <Arg>
       <New class="org.apache.derby.jdbc.EmbeddedDataSource">
-        <Set name="DatabaseName">target/database</Set>
+        <Set name="DatabaseName">target/databases/users</Set>
         <Set name="user">sa</Set>
         <Set name="createDatabase">create</Set>
       </New>
     </Arg>
   </New>
-  <New id="shutdown" class="org.mortbay.jetty.plus.naming.Resource">
-    <Arg>jdbc/shutdown</Arg>
+
+  <New id="usersShutdown" class="org.mortbay.jetty.plus.naming.Resource">
+    <Arg>jdbc/usersShutdown</Arg>
     <Arg>
       <New class="org.apache.derby.jdbc.EmbeddedDataSource">
-        <Set name="DatabaseName">target/database</Set>
+        <Set name="DatabaseName">target/databases/users</Set>
         <Set name="user">sa</Set>
         <Set name="shutdownDatabase">shutdown</Set>
       </New>
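
The jdbc/archiva and jdbc/users resources defined above are plain JNDI DataSources backed by embedded Derby. Inside the web application they would normally be resolved through java:comp/env; a generic lookup sketch using the standard javax.naming/javax.sql APIs (the env-prefixed name assumes the usual resource-ref mapping, which is not part of this commit):

    import javax.naming.InitialContext;
    import javax.sql.DataSource;
    import java.sql.Connection;

    public class ArchivaDataSourceLookup
    {
        public static void main( String[] args ) throws Exception
        {
            // Resolves the Derby datasource registered as jdbc/archiva in jetty.xml.
            InitialContext ctx = new InitialContext();
            DataSource ds = (DataSource) ctx.lookup( "java:comp/env/jdbc/archiva" );

            Connection connection = ds.getConnection();
            try
            {
                System.out.println( "connected: " + !connection.isClosed() );
            }
            finally
            {
                connection.close();
            }
        }
    }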
index de64abe26e0bbcadae9cf95b8736edc0f09f6d02..952cc674ccf6344800d7293c55aa87ee2eba2a50 100644 (file)
@@ -19,31 +19,15 @@ package org.apache.maven.archiva.web.action;
  * under the License.
  */
 
-import com.opensymphony.xwork.Preparable;
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.archiva.configuration.RepositoryConfiguration;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
 import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.executor.ReportExecutor;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
 import org.apache.maven.archiva.security.ArchivaRoleConstants;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
 import org.codehaus.plexus.security.rbac.Resource;
 import org.codehaus.plexus.security.ui.web.interceptor.SecureAction;
 import org.codehaus.plexus.security.ui.web.interceptor.SecureActionBundle;
 import org.codehaus.plexus.security.ui.web.interceptor.SecureActionException;
 import org.codehaus.plexus.xwork.action.PlexusActionSupport;
 
-import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 
 /**
  * Repository reporting.
@@ -53,185 +37,23 @@ import java.util.Map;
  */
 public class ReportsAction
     extends PlexusActionSupport
-    implements Preparable, SecureAction
+    implements SecureAction
 {
     /**
      * @plexus.requirement
      */
-    private ArchivaConfiguration archivaConfiguration;
+    private ReportingDatabase database;
 
-    /**
-     * @plexus.requirement
-     */
-    private ConfiguredRepositoryFactory factory;
-
-    private List databases;
-
-    private String repositoryId;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportExecutor executor;
-
-    private Configuration configuration;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.reporting.group.ReportGroup"
-     */
-    private Map reports;
-
-    private String reportGroup = DEFAULT_REPORT_GROUP;
-
-    private static final String DEFAULT_REPORT_GROUP = "health";
-
-    private String filter;
+    private List reports;
 
     public String execute()
         throws Exception
     {
-        ReportGroup reportGroup = (ReportGroup) reports.get( this.reportGroup );
-
-        databases = new ArrayList();
-
-        if ( repositoryId != null && !repositoryId.equals( "-" ) )
-        {
-            RepositoryConfiguration repositoryConfiguration = configuration.getRepositoryById( repositoryId );
-            getReport( repositoryConfiguration, reportGroup );
-        }
-        else
-        {
-            for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
-            {
-                RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
-                getReport( repositoryConfiguration, reportGroup );
-            }
-        }
-        return SUCCESS;
-    }
-
-    private void getReport( RepositoryConfiguration repositoryConfiguration, ReportGroup reportGroup )
-        throws ReportingStoreException
-    {
-        ArtifactRepository repository = factory.createRepository( repositoryConfiguration );
-
-        ReportingDatabase database = executor.getReportDatabase( repository, reportGroup );
-
-        if ( filter != null && !filter.equals( "-" ) )
-        {
-            database = database.getFilteredDatabase( filter );
-        }
-
-        databases.add( database );
-    }
-
-    public String runReport()
-        throws Exception
-    {
-        ReportGroup reportGroup = (ReportGroup) reports.get( this.reportGroup );
-
-        RepositoryConfiguration repositoryConfiguration = configuration.getRepositoryById( repositoryId );
-        ArtifactRepository repository = factory.createRepository( repositoryConfiguration );
-
-        ReportingDatabase database = executor.getReportDatabase( repository, reportGroup );
-        if ( database.isInProgress() )
-        {
-            return SUCCESS;
-        }
-
-        generateReport( database, repositoryConfiguration, reportGroup, repository );
-
+        reports = database.getArtifactDatabase().getAllArtifactResults();
+        
         return SUCCESS;
     }
 
-    private void generateReport( ReportingDatabase database, RepositoryConfiguration repositoryConfiguration,
-                                 ReportGroup reportGroup, ArtifactRepository repository )
-        throws DiscovererException, ReportingStoreException
-    {
-        database.setInProgress( true );
-
-        List blacklistedPatterns = new ArrayList();
-        if ( repositoryConfiguration.getBlackListPatterns() != null )
-        {
-            blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() );
-        }
-        if ( configuration.getGlobalBlackListPatterns() != null )
-        {
-            blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() );
-        }
-
-        ArtifactFilter filter;
-        if ( repositoryConfiguration.isIncludeSnapshots() )
-        {
-            filter = new AcceptAllArtifactFilter();
-        }
-        else
-        {
-            filter = new SnapshotArtifactFilter();
-        }
-
-        try
-        {
-            executor.runReports( reportGroup, repository, blacklistedPatterns, filter );
-        }
-        finally
-        {
-            database.setInProgress( false );
-        }
-    }
-
-    public void setReportGroup( String reportGroup )
-    {
-        this.reportGroup = reportGroup;
-    }
-
-    public String getReportGroup()
-    {
-        return reportGroup;
-    }
-
-    public String getRepositoryId()
-    {
-        return repositoryId;
-    }
-
-    public void setRepositoryId( String repositoryId )
-    {
-        this.repositoryId = repositoryId;
-    }
-
-    public List getDatabases()
-    {
-        return databases;
-    }
-
-    public void prepare()
-        throws Exception
-    {
-        configuration = archivaConfiguration.getConfiguration();
-    }
-
-    public Configuration getConfiguration()
-    {
-        return configuration;
-    }
-
-    public Map getReports()
-    {
-        return reports;
-    }
-
-    public String getFilter()
-    {
-        return filter;
-    }
-
-    public void setFilter( String filter )
-    {
-        this.filter = filter;
-    }
-
     public SecureActionBundle getSecureActionBundle()
         throws SecureActionException
     {
@@ -242,4 +64,9 @@ public class ReportsAction
 
         return bundle;
     }
+
+    public List getReports()
+    {
+        return reports;
+    }
 }
index f8f8ca318829e9e0047be5b97a1c979968d32fc3..ecc109a868464d43fdb22b227fd876fcdc23fc8e 100644 (file)
@@ -32,6 +32,7 @@ import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
 import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
 import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
 import org.apache.maven.archiva.proxy.ProxyException;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
 import org.apache.maven.archiva.web.util.VersionMerger;
 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.factory.ArtifactFactory;
@@ -111,6 +112,11 @@ public class ShowArtifactAction
      * @plexus.requirement
      */
     private DependencyTreeBuilder dependencyTreeBuilder;
+    
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactResultsDatabase artifactsDatabase;
 
     private String groupId;
 
@@ -131,6 +137,8 @@ public class ShowArtifactAction
     private String artifactPath;
 
     private List mailingLists;
+    
+    private List reports;
 
     public String artifact()
         throws IOException, XmlPullParserException, ProjectBuildingException, ResourceDoesNotExistException,
@@ -182,6 +190,21 @@ public class ShowArtifactAction
 
         return SUCCESS;
     }
+    
+    public String reports()
+        throws IOException, XmlPullParserException, ProjectBuildingException
+    {
+        if ( !checkParameters() )
+        {
+            return ERROR;
+        }
+
+        System.out.println("#### In reports.");
+        this.reports = artifactsDatabase.findArtifactResults( groupId, artifactId, version );
+        System.out.println("#### Found " + reports.size() + " reports.");
+
+        return SUCCESS;
+    }
 
     public String dependees()
         throws IOException, XmlPullParserException, ProjectBuildingException, RepositoryIndexException,
@@ -518,4 +541,9 @@ public class ShowArtifactAction
     {
         return repositoryUrlName;
     }
+
+    public List getReports()
+    {
+        return reports;
+    }
 }
index 34e3a981abe94e389473fb1469f57ec375264b94..a2cdcffb457c61f50c20889a80244c18427e012c 100644 (file)
@@ -22,12 +22,13 @@ package org.apache.maven.archiva.web.action.admin;
 import com.opensymphony.xwork.ModelDriven;
 import com.opensymphony.xwork.Preparable;
 import com.opensymphony.xwork.Validateable;
+
 import org.apache.maven.archiva.configuration.ArchivaConfiguration;
 import org.apache.maven.archiva.configuration.Configuration;
 import org.apache.maven.archiva.configuration.InvalidConfigurationException;
 import org.apache.maven.archiva.indexer.RepositoryIndexException;
 import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor;
+import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
 import org.apache.maven.archiva.security.ArchivaRoleConstants;
 import org.codehaus.plexus.registry.RegistryException;
 import org.codehaus.plexus.scheduler.CronExpressionValidator;
@@ -56,15 +57,15 @@ public class ConfigureAction
     private ArchivaConfiguration archivaConfiguration;
 
     /**
-     * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
+     * @plexus.requirement
      */
-    private IndexerTaskExecutor indexer;
+    private ActiveManagedRepositories activeRepositories;
 
     /**
      * The configuration.
      */
     private Configuration configuration;
-
+    
     private CronExpressionValidator cronValidator;
 
     private String second = "0";
@@ -100,7 +101,7 @@ public class ConfigureAction
     {
         // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
         // TODO: if this is changed, do we move the index or recreate it?
-        configuration.setIndexerCronExpression( getCronExpression() );
+        configuration.setDataRefreshCronExpression( getCronExpression() );
 
         // Normalize the path
         File file = new File( configuration.getIndexPath() );
@@ -125,7 +126,7 @@ public class ConfigureAction
 
     public String input()
     {
-        String[] cronEx = configuration.getIndexerCronExpression().split( " " );
+        String[] cronEx = configuration.getDataRefreshCronExpression().split( " " );
         int i = 0;
 
         while ( i < cronEx.length )
@@ -157,9 +158,9 @@ public class ConfigureAction
             i++;
         }
 
-        if ( indexer.getLastIndexingTime() != 0 )
+        if ( activeRepositories.getLastDataRefreshTime() != 0 )
         {
-            lastIndexingTime = new Date( indexer.getLastIndexingTime() ).toString();
+            lastIndexingTime = new Date( activeRepositories.getLastDataRefreshTime() ).toString();
         }
         else
         {
index c0fa12f82cfa4f7298cddfdcc68b02f2d2afe97f..06daf24a9d61a9b018d2378ef6ab24243c148043 100644 (file)
@@ -20,12 +20,12 @@ package org.apache.maven.archiva.web.action.admin;
  */
 
 import org.apache.maven.archiva.scheduler.RepositoryTaskScheduler;
-import org.apache.maven.archiva.scheduler.TaskExecutionException;
 import org.apache.maven.archiva.security.ArchivaRoleConstants;
 import org.codehaus.plexus.security.rbac.Resource;
 import org.codehaus.plexus.security.ui.web.interceptor.SecureAction;
 import org.codehaus.plexus.security.ui.web.interceptor.SecureActionBundle;
 import org.codehaus.plexus.security.ui.web.interceptor.SecureActionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
 import org.codehaus.plexus.xwork.action.PlexusActionSupport;
 
 /**
@@ -42,10 +42,10 @@ public class RunRepositoryTaskAction
      */
     private RepositoryTaskScheduler taskScheduler;
 
-    public String runIndexer()
+    public String runRefresh()
         throws TaskExecutionException
     {
-        taskScheduler.runIndexer();
+        taskScheduler.runDataRefresh();
 
         return SUCCESS;
     }
index 93568a2ae4f52c73257121c963a4f3fc71fc45c6..6ae6b8a499151eba19b1f03255c59822454cad2e 100644 (file)
@@ -35,7 +35,7 @@ import java.util.Date;
 /**
  * AuditLog - Audit Log. 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  * 
  * @plexus.component role="org.apache.maven.archiva.web.repository.AuditLog"
index 77e45ebfd5c9b71d754ab4c441fe1aa45edf9d1c..db43e54b77dc33827adcf74319295acb87e67ea9 100644 (file)
@@ -49,7 +49,7 @@ import java.util.List;
 /**
  * ProxiedDavServer
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  * @plexus.component role="org.codehaus.plexus.webdav.DavServerComponent"
  * role-hint="proxied"
index 687dae1f23ccf341363f165dcd1ba8f58831cbaa..cb31844438aca74dd4f8deaa5c1ce3f891745366 100644 (file)
@@ -52,7 +52,7 @@ import java.util.List;
 /**
  * RepositoryServlet
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class RepositoryServlet
index 226f27c387c56ad855330b39d0eb95196bbea38d..79c4b95b64e34d7be8d290ddece3265a11c2d6a1 100644 (file)
@@ -25,7 +25,7 @@ import com.opensymphony.xwork.util.OgnlValueStack;
 
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.artifact.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
 import org.apache.maven.archiva.configuration.RepositoryConfiguration;
 import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
 import org.apache.maven.project.ProjectBuildingException;
@@ -44,7 +44,7 @@ import javax.servlet.jsp.PageContext;
 /**
  * DownloadArtifact 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  * 
  * @plexus.component role="com.opensymphony.webwork.components.Component" role-hint="download-artifact" 
index 7127acad91b86a9684f53996e1214e41b4d0044a..21aaa982f753e9be07da204a14257db029e63c2f 100644 (file)
@@ -27,7 +27,7 @@ import javax.servlet.jsp.tagext.TagSupport;
 /**
  * DownloadArtifactTag 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class DownloadArtifactTag
index 8efab8a18a48f881d49e4520520d32c78b96064e..b59c5636fcea77aabf5fdf648a5ab833256c543f 100644 (file)
@@ -29,7 +29,7 @@ import javax.servlet.jsp.tagext.Tag;
 /**
  * ExpressionTool 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class ExpressionTool
index 9d2e5d6f8ed638be9b7b4c9a18f99cb5f32a566a..75b3b8081be2a919bc5b0af18ef08595a6a75dd3 100644 (file)
@@ -33,7 +33,7 @@ import javax.servlet.http.HttpServletResponse;
 /**
  * GroupIdLink 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class GroupIdLink
index 462d6ae6f2d43a848d7aca4a223a01ea44d055ec..ec42b8bbec0cba3215b9ab9db74d8166dc96fc90 100644 (file)
@@ -29,7 +29,7 @@ import javax.servlet.jsp.tagext.TagSupport;
 /**
  * GroupIdLink 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class GroupIdLinkTag
index 2b730c6bbd98bdf4f52cb90b68c558d8a6cefcff..a56068a3021d9d6b4dfc82d7693a2aa225a94ecb 100644 (file)
@@ -29,7 +29,7 @@ import javax.servlet.jsp.PageContext;
 /**
  * PlexusTagUtil 
  *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
  * @version $Id$
  */
 public class PlexusTagUtil
index 62b65f63eae1f87a789fa2c16260f3db716a638f..9b0d313bf5e2a4417643513da22f464aae502688 100644 (file)
       </configuration>
     </component>
 
+    <component>
+      <role>org.codehaus.plexus.jdo.JdoFactory</role>
+      <role-hint>archiva</role-hint>
+      <implementation>org.codehaus.plexus.jdo.DataSourceConfigurableJdoFactory</implementation>
+      <configuration>
+
+        <connectionFactoryName>java:comp/env/jdbc/archiva</connectionFactoryName>
+        <shutdownConnectionFactoryName>java:comp/env/jdbc/archivaShutdown</shutdownConnectionFactoryName>
+
+        <!-- JPOX and JDO configuration -->
+        <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+        <otherProperties>
+          <property>
+            <name>org.jpox.autoCreateSchema</name>
+            <value>true</value>
+          </property>
+          <property>
+            <name>org.jpox.validateTables</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateConstraints</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.validateColumns</name>
+            <value>false</value>
+          </property>
+          <property>
+            <name>org.jpox.autoStartMechanism</name>
+            <value>None</value>
+          </property>
+          <property>
+            <name>org.jpox.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.poid.transactionIsolation</name>
+            <value>READ_UNCOMMITTED</value>
+          </property>
+          <property>
+            <name>org.jpox.rdbms.dateTimezone</name>
+            <value>JDK_DEFAULT_TIMEZONE</value>
+          </property>
+          <!-- NEEDED FOR POSTGRES, but causes problems in other JDBC implementations.
+          <property>
+            <name>org.jpox.identifier.case</name>
+            <value>PreserveCase</value>
+          </property>
+            -->
+        </otherProperties>
+      </configuration>
+    </component>
+
     <component>
       <role>org.codehaus.plexus.jdo.JdoFactory</role>
       <role-hint>users</role-hint>
             <name>org.jpox.rdbms.dateTimezone</name>
             <value>JDK_DEFAULT_TIMEZONE</value>
           </property>
-<!--
+          <!-- NEEDED FOR POSTGRES, but causes problems in other JDBC implementations.
           <property>
             <name>org.jpox.identifier.case</name>
             <value>PreserveCase</value>
           </property>
--->
+            -->
         </otherProperties>
       </configuration>
     </component>
      -->
     <component>
       <role>org.codehaus.plexus.logging.LoggerManager</role>
-      <implementation>org.codehaus.plexus.logging.log4j.Log4JLoggerManager</implementation>
+      <implementation>org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager</implementation>
       <lifecycle-handler>basic</lifecycle-handler>
-
-      <configuration>
-        <threshold>WARN</threshold>
-        <default-appender>console,rolling</default-appender>
-
-        <appenders>
-          <appender>
-            <id>console</id>
-            <threshold>DEBUG</threshold>
-            <type>org.apache.log4j.ConsoleAppender</type>
-            <conversion-pattern>%d [%t] %-5p %-30c{1} - %m%n</conversion-pattern>
-          </appender>
-
-          <appender>
-            <id>rolling</id>
-            <threshold>DEBUG</threshold>
-            <type>org.apache.log4j.DailyRollingFileAppender</type>
-            <conversion-pattern>%-4r [%t] %-5p %c %x - %m%n</conversion-pattern>
-
-            <properties>
-              <property>
-                <name>file</name>
-                <value>${appserver.base}/logs/archiva.log</value>
-              </property>
-              <property>
-                <name>append</name>
-                <value>true</value>
-              </property>
-              <property>
-                <name>datePattern</name>
-                <value>'.'yyyy-MM-dd</value>
-              </property>
-            </properties>
-          </appender>
-        </appenders>
-
-        <levels>
-          <!-- Help identify bugs during testing -->
-          <level>
-            <hierarchy>org.apache.maven</hierarchy>
-            <level>INFO</level>
-          </level>
-          <level>
-            <hierarchy>org.codehaus.plexus.security</hierarchy>
-            <level>INFO</level>
-          </level>
-          <!-- squelch noisy objects (for now) -->
-          <level>
-            <hierarchy>org.codehaus.plexus.mailsender.MailSender</hierarchy>
-            <level>INFO</level>
-          </level>
-          <level>
-            <hierarchy>org.quartz</hierarchy>
-            <level>INFO</level>
-          </level>
-          <level>
-            <hierarchy>org.apache.jasper</hierarchy>
-            <level>INFO</level>
-          </level>
-          <level>
-            <hierarchy>com.opensymphony.xwork</hierarchy>
-            <level>INFO</level>
-          </level>
-          <level>
-            <hierarchy>com.opensymphony.webwork</hierarchy>
-            <level>INFO</level>
-          </level>
-          <level>
-            <hierarchy>org.codehaus.plexus.PlexusContainer</hierarchy>
-            <level>INFO</level>
-          </level>
-          <level>
-            <hierarchy>JPOX</hierarchy>
-            <level>WARN</level>
-          </level>
-          <level>
-            <hierarchy>JPOX.MetaData</hierarchy>
-            <level>ERROR</level>
-          </level>
-          <!--
-                    <level>
-                      <hierarchy>JPOX.RDBMS.SQL</hierarchy>
-                      <level>DEBUG</level>
-                    </level>
-          -->
-          <level>
-            <hierarchy>freemarker</hierarchy>
-            <level>WARN</level>
-          </level>
-        </levels>
-      </configuration>
     </component>
-
   </components>
 
   <load-on-start>
     </component>
     <component>
       <role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
-      <role-hint>indexer</role-hint>
+      <role-hint>data-refresh</role-hint>
     </component>
   </load-on-start>
 
diff --git a/archiva-webapp/src/main/resources/log4j.xml b/archiva-webapp/src/main/resources/log4j.xml
new file mode 100644 (file)
index 0000000..9875c60
--- /dev/null
@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+
+  <appender name="rolling" class="org.apache.log4j.DailyRollingFileAppender">
+    <param name="file" value="${appserver.base}/logs/archiva.log" />
+    <param name="append" value="true" />
+    <param name="datePattern" value="'.'yyyy-MM-dd" />
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%-4r [%t] %-5p %c %x - %m%n"/>
+    </layout>
+  </appender>
+
+  <appender name="console" class="org.apache.log4j.ConsoleAppender">
+    <param name="Target" value="System.out"/>
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%d [%t] %-5p %-30c{1} - %m%n"/>
+    </layout>
+  </appender>
+
+  <!-- Help identify bugs during testing -->
+  <logger name="org.apache.maven">
+    <level value="info"/>
+  </logger>
+
+  <logger name="org.codehaus.plexus.security">
+    <level value="info"/>
+  </logger>
+
+  <!-- squelch noisy objects (for now) -->
+  <logger name="org.codehaus.plexus.mailsender.MailSender">
+    <level value="info"/>
+  </logger>
+
+  <logger name="org.quartz">
+    <level value="info"/>
+  </logger>
+
+  <logger name="org.apache.jasper">
+    <level value="info"/>
+  </logger>
+
+  <logger name="com.opensymphony.xwork">
+    <level value="info"/>
+  </logger>
+
+  <logger name="com.opensymphony.webwork">
+    <level value="info"/>
+  </logger>
+
+  <logger name="org.codehaus.plexus.PlexusContainer">
+    <level value="info"/>
+  </logger>
+
+  <logger name="JPOX">
+    <level value="warn"/>
+  </logger>
+
+  <logger name="JPOX.MetaData">
+    <level value="error"/>
+  </logger>
+
+<!--
+  <logger name="JPOX.RDBMS.SQL">
+    <level value="debug"/>
+  </logger>
+  -->
+
+  <logger name="freemarker">
+    <level value="warn"/>
+  </logger>
+
+  <root>
+    <priority value ="warn" />
+    <appender-ref ref="console" />
+    <appender-ref ref="rolling" />
+  </root>
+
+</log4j:configuration>
index f89edbe4b9af7fce83f77d8927e7a0b6829c8f54..703e922fcd83bbc22973703a2f8257ad10e8d7ce 100644 (file)
         <param name="actionName">login</param>
         <param name="namespace">/security</param>
       </result>
+      <result name="requires-authorization" type="redirect-action">
+        <param name="actionName">login</param>
+        <param name="namespace">/security</param>
+      </result>
       <result name="security-register-success" type="redirect-action">
         <param name="actionName">login</param>
         <param name="namespace">/security</param>
       <result>/WEB-INF/jsp/showArtifact.jsp</result>
     </action>
 
+    <action name="showArtifactReports" class="showArtifactAction" method="reports">
+      <result>/WEB-INF/jsp/showArtifact.jsp</result>
+    </action>
+    
     <action name="showArtifactDependencies" class="showArtifactAction" method="dependencies">
       <result>/WEB-INF/jsp/showArtifact.jsp</result>
     </action>
diff --git a/archiva-webapp/src/main/webapp/WEB-INF/jsp/include/artifactReports.jspf b/archiva-webapp/src/main/webapp/WEB-INF/jsp/include/artifactReports.jspf
new file mode 100644 (file)
index 0000000..1a9d416
--- /dev/null
@@ -0,0 +1,38 @@
+<%--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  --%>
+
+<%@ taglib prefix="ww" uri="/webwork" %>
+<%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %>
+<%@ taglib prefix="my" tagdir="/WEB-INF/tags" %>
+
+<c:forEach items="${reports}" var="report">
+  <h3>
+      ${report.groupId} : ${report.artifactId} : ${report.version} : ${report.classifier} : ${report.type}
+  </h3>
+  <ul>
+    <c:forEach items="${report.results}" var="result">
+      <li>
+        <b>${result.reason}</b>
+      </li>
+    </c:forEach>
+  </ul>
+</c:forEach>
+<c:if test="${empty(reports)}">
+  <strong>No reports for this artifact.</strong>
+</c:if>
index 5d73c0cd77cc337f7b0500e550719f74bddc3290..33d342ae069b4fea9038015e8b7dd3cc74981ec0 100644 (file)
@@ -26,9 +26,7 @@
 
 <html>
 <head>
-  <ww:set name="reports" value="reports"/>
-  <ww:set name="reportGroup" value="reportGroup"/>
-  <title>Report: ${reports[reportGroup].name}</title>
+  <title>Reports</title>
   <ww:head/>
 </head>
 
 
 <div id="contentArea">
 
-<pss:ifAnyAuthorized permissions="archiva-access-reports">
-  <ww:form action="reports" namespace="/admin">
-    <ww:select list="reports" label="Report" name="reportGroup" onchange="document.reports.submit();"/>
-    <ww:select list="configuration.repositories" listKey="id" listValue="name" label="Repository" headerKey="-"
-               headerValue="(All repositories)" name="repositoryId" onchange="document.reports.submit();"/>
-    <ww:select list="reports[reportGroup].reports" label="Filter" headerKey="-" headerValue="(All Problems)"
-               name="filter" onchange="document.reports.submit();"/>
-    <ww:submit value="Get Report"/>
-  </ww:form>
-</pss:ifAnyAuthorized>
-
-<ww:set name="databases" value="databases"/>
-<c:forEach items="${databases}" var="database">
-<div>
-<div style="float: right">
-    <%-- TODO!
-  <a href="#">Repair all</a>
-  |
-    --%>
-  <c:choose>
-    <c:when test="${!database.inProgress}">
-      <pss:ifAuthorized permission="archiva-access-reports">
-        <ww:url id="regenerateReportUrl" action="runReport" namespace="/admin">
-          <ww:param name="repositoryId">${database.repository.id}</ww:param>
-          <ww:param name="reportGroup" value="reportGroup"/>
-        </ww:url>
-        <ww:a href="%{regenerateReportUrl}">Regenerate Report</ww:a>
-      </pss:ifAuthorized>
-    </c:when>
-    <c:otherwise>
-      <!-- TODO: would be good to have a generic task/job mechanism that tracked progress and ability to run
-      concurrently -->
-      <span style="color: gray;">Report in progress</span>
-    </c:otherwise>
-  </c:choose>
-</div>
-<h2>Repository: ${database.repository.name}</h2>
-
-<p>
-  <c:choose>
-    <c:when test="${!empty(database.reporting.lastModified)}">
-      Status:
-      <img src="<c:url value="/images/icon_error_sml.gif"/>" width="15" height="15" alt=""/>
-      ${database.numFailures}
-      <img src="<c:url value="/images/icon_warning_sml.gif"/>" width="15" height="15" alt=""/>
-      ${database.numWarnings}
-      <img src="<c:url value="/images/icon_info_sml.gif"/>" width="15" height="15" alt=""/>
-      ${database.numNotices}
-
-      <span style="font-size: x-small">
-        <jsp:useBean id="date" class="java.util.Date"/>
-        <c:set target="${date}" property="time" value="${database.reporting.lastModified}"/>
-        Last updated: <fmt:formatDate type="both" value="${date}"/>,
-        execution time: <fmt:formatNumber maxFractionDigits="0" value="${database.reporting.executionTime / 60000}"/> minutes
-        <fmt:formatNumber maxFractionDigits="0" value="${(database.reporting.executionTime / 1000) % 60}"/> seconds
-      </span>
-    </c:when>
-    <c:otherwise>
-      <b>
-        This report has not yet been generated. <a href="${url}">Generate Report</a>
-      </b>
-    </c:otherwise>
-  </c:choose>
-</p>
-
-  <%-- TODO need to protect iterations against concurrent modification exceptions by cloning the lists synchronously --%>
-  <%-- TODO! paginate (displaytag?) --%>
-<c:if test="${!empty(database.reporting.artifacts)}">
-  <h3>Artifacts</h3>
-  <c:forEach items="${database.reporting.artifacts}" var="artifact" begin="0" end="2">
-    <ul>
-      <c:forEach items="${artifact.failures}" var="result">
-        <li class="errorBullet">${result.reason}</li>
-      </c:forEach>
-      <c:forEach items="${artifact.warnings}" var="result">
-        <li class="warningBullet">${result.reason}</li>
-      </c:forEach>
-      <c:forEach items="${artifact.notices}" var="result">
-        <li class="infoBullet">${result.reason}</li>
-      </c:forEach>
-    </ul>
-    <p style="text-indent: 3em;">
-      <my:showArtifactLink groupId="${artifact.groupId}" artifactId="${artifact.artifactId}"
-                           version="${artifact.version}" classifier="${artifact.classifier}"/>
-    </p>
-    <%-- TODO!
-              <td>
-                <a href="#">Repair</a>
-              </td>
-    --%>
-  </c:forEach>
-  <c:if test="${fn:length(database.reporting.artifacts) gt 3}">
-    <p>
-      <b>... more ...</b>
-    </p>
-  </c:if>
-</c:if>
-<c:if test="${!empty(database.metadataWithProblems)}">
-  <h3>Metadata</h3>
-  <c:forEach items="${database.metadataWithProblems}" var="metadata" begin="0" end="2">
-    <ul>
-      <c:forEach items="${metadata.failures}" var="result">
-        <li class="errorBullet">${result.reason}</li>
-      </c:forEach>
-      <c:forEach items="${metadata.warnings}" var="result">
-        <li class="warningBullet">${result.reason}</li>
-      </c:forEach>
-      <c:forEach items="${metadata.notices}" var="result">
-        <li class="infoBullet">${result.reason}</li>
-      </c:forEach>
-    </ul>
-    <p style="text-indent: 3em;">
-      <my:showArtifactLink groupId="${metadata.groupId}" artifactId="${metadata.artifactId}"
-                           version="${metadata.version}"/>
-    </p>
-    <%-- TODO!
-              <td>
-                <a href="#">Repair</a>
-              </td>
-    --%>
-  </c:forEach>
-  <c:if test="${fn:length(database.metadataWithProblems) gt 3}">
-    <p>
-      <b>... more ...</b>
-    </p>
-  </c:if>
-</c:if>
-</div>
+<c:forEach items="${reports}" var="report">
+  <h3>
+      ${report.groupId} : ${report.artifactId} : ${report.version} : ${report.classifier} : ${report.type}
+  </h3>
+  <ul>
+    <c:forEach items="${report.results}" var="result">
+      <li>
+        <b>${result.reason}</b>
+      </li>
+    </c:forEach>
+  </ul>
 </c:forEach>
+<c:if test="${empty(reports)}">
+  <strong>No reports for any artifact.</strong>
+</c:if>
+
 </div>
 
 </body>
index 60299d7d3f0f85e7a3968cfb1d70c20c97681125..0ba14bb8425f7953abe687450e70053d72da64b0 100644 (file)
@@ -21,6 +21,7 @@
 <%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %>
 <%@ taglib prefix="my" tagdir="/WEB-INF/tags" %>
 <%@ taglib prefix="archiva" uri="http://maven.apache.org/archiva" %>
+<%@ taglib prefix="pss" uri="/plexusSecuritySystem" %>
 
 <html>
 <head>
         </ww:url>
       </c:set>
       <my:currentWWUrl url="${url}">Mailing Lists</my:currentWWUrl>
+      <pss:ifAnyAuthorized permissions="archiva-access-reports">
+        <c:set var="url">
+          <ww:url action="showArtifactReports">
+            <ww:param name="groupId" value="%{groupId}"/>
+            <ww:param name="artifactId" value="%{artifactId}"/>
+            <ww:param name="version" value="%{version}"/>
+          </ww:url>
+        </c:set>
+        <my:currentWWUrl url="${url}">Reports</my:currentWWUrl>
+      </pss:ifAnyAuthorized>
+      
     </span>
   </div>
 
       <c:when test="${mailingLists != null}">
         <%@ include file="/WEB-INF/jsp/include/mailingLists.jspf" %>
       </c:when>
+      <c:when test="${reports != null}">
+        <%@ include file="/WEB-INF/jsp/include/artifactReports.jspf" %>
+      </c:when>
       <c:otherwise>
         <%@ include file="/WEB-INF/jsp/include/artifactInfo.jspf" %>
       </c:otherwise>
diff --git a/pom.xml b/pom.xml
index 124dcc2b1000874b9f3d841e97f0be30fda7b27d..897821db7f5c951ffa081e9016274f2e2313ec7e 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -24,6 +24,9 @@
     <version>4</version>
     <relativePath>../pom/maven/pom.xml</relativePath>
   </parent>
+  <prerequisites>
+    <maven>2.0.5</maven>
+  </prerequisites>
   <groupId>org.apache.maven.archiva</groupId>
   <artifactId>archiva</artifactId>
   <packaging>pom</packaging>
@@ -81,9 +84,9 @@
         <version>1.3.3</version>
         <executions>
           <execution>
+            <id>generate</id>
             <goals>
               <goal>descriptor</goal>
-              <goal>merge-descriptors</goal>
             </goals>
           </execution>
         </executions>
   <modules>
     <module>archiva-applet</module>
     <module>archiva-converter</module>
+    <module>archiva-common</module>
     <module>archiva-discoverer</module>
     <module>archiva-reports-standard</module>
     <module>archiva-indexer</module>
         <artifactId>maven-app-configuration-web</artifactId>
         <version>1.0-SNAPSHOT</version>
       </dependency>
-      <!--
-        Rejected Plexus Container / Component Versions:
-          1.0-alpha-11
-             2007-01-17 11:40:40.371::WARN:  Failed startup of context org.mortbay.jetty.webapp.WebAppContext@553763
-             {/,/home/joakim/code/maven/trunks/archiva/archiva-webapp/src/main/webapp}
-             java.lang.NullPointerException
-                     at org.codehaus.plexus.classworlds.strategy.DefaultStrategy.getResource(DefaultStrategy.java:99)
-                     at org.codehaus.plexus.classworlds.strategy.ForeignStrategy.getResource(ForeignStrategy.java:54)
-                     at org.codehaus.plexus.classworlds.strategy.DefaultStrategy.getResourceAsStream(DefaultStrategy.java:107)
-                     at org.codehaus.plexus.classworlds.realm.ClassRealm.getResourceAsStream(ClassRealm.java:207)
-                     at org.codehaus.plexus.DefaultPlexusContainer.<init>(DefaultPlexusContainer.java:244)
-        
-          1.0-alpha-12
-          1.0-alpha-13
-          1.0-alpha-14 
-             Caused by: org.codehaus.plexus.PlexusContainerException: The specified user configuration 
-             'file:/home/joakim/code/maven/trunks/archiva/archiva-webapp/src/main/webapp/WEB-INF/classes/META-INF/plexus/application.xml' is null.
-
-          1.0-alpha-15
-             The resolution of ${configuration.store.file} is never attempted.
-
-          1.0-alpha-16-SNAPSHOT
-             Incompatible with plexus-xwork-integration 
-        -->
       <dependency>
         <groupId>org.codehaus.plexus</groupId>
         <artifactId>plexus-container-default</artifactId>
         <artifactId>wagon-http-lightweight</artifactId>
         <version>${wagon.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.maven.archiva</groupId>
+        <artifactId>archiva-common</artifactId>
+        <version>${pom.version}</version>
+      </dependency>
+      <!--
+      <dependency>
+        <groupId>org.apache.maven.archiva</groupId>
+        <artifactId>archiva-common</artifactId>
+        <version>${pom.version}</version>
+        <classifier>tests</classifier>
+        <scope>test</scope>
+      </dependency>
+       -->
       <dependency>
         <groupId>org.apache.maven.archiva</groupId>
         <artifactId>archiva-core</artifactId>
       </build>
     </profile>
   </profiles>
-  <!-- TODO: remove once modello is released -->
-  <pluginRepositories>
-    <pluginRepository>
-      <id>codehaus.org</id>
-      <url>http://snapshots.repository.codehaus.org</url>
-    </pluginRepository>
-  </pluginRepositories>
-  <!-- TODO: remove once ehcache, p-sec, registry, webdav, xwork, naming released -->
   <repositories>
     <repository>
       <id>codehaus.org</id>
+      <url>http://repository.codehaus.org</url>
+      <releases>
+        <enabled>true</enabled>
+      </releases>
+      <snapshots>
+        <enabled>false</enabled>
+      </snapshots>
+    </repository>
+    <!-- TODO: remove once ehcache, p-sec, registry, webdav, xwork, naming released -->
+    <repository>
+      <id>snapshots.codehaus.org</id>
       <url>http://snapshots.repository.codehaus.org</url>
       <releases>
         <enabled>false</enabled>
       </snapshots>
     </repository>
   </repositories>
+  <!-- TODO: remove once modello is released -->
+  <pluginRepositories>
+    <pluginRepository>
+      <id>snapshots.codehaus.org</id>
+      <url>http://snapshots.repository.codehaus.org</url>
+      <releases>
+        <enabled>false</enabled>
+      </releases>
+      <snapshots>
+        <enabled>true</enabled>
+      </snapshots>
+    </pluginRepository>
+  </pluginRepositories>
   <properties>
     <maven.version>2.0.5</maven.version>
     <wagon.version>1.0-beta-2</wagon.version>