source.dussan.org Git - archiva.git/commitdiff
o breaking the reporting into packages to get a grip of the architecture
authorJason van Zyl <jvanzyl@apache.org>
Sat, 2 Dec 2006 00:07:57 +0000 (00:07 +0000)
committerJason van Zyl <jvanzyl@apache.org>
Sat, 2 Dec 2006 00:07:57 +0000 (00:07 +0000)
git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@481451 13f79535-47bb-0310-9956-ffa450edef68

70 files changed:
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/AbstractReportGroup.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ArtifactReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/BadMetadataReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ChecksumArtifactReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ChecksumMetadataReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportExecutor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportGroup.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportingStore.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DependencyArtifactReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/InvalidPomArtifactReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/LocationArtifactReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/MetadataReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldArtifactReportGroup.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldArtifactReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportExecutor.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportGroup.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingDatabase.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingMetadataFilter.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingStore.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingStoreException.java [deleted file]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/OldArtifactReportGroup.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java [new file with mode: 0644]
archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/AbstractChecksumArtifactReporterTestCase.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/BadMetadataReportProcessorTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/ChecksumArtifactReporterTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DefaultArtifactReporterTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DependencyArtifactReportProcessorTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessorTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/InvalidPomArtifactReportProcessorTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/LocationArtifactReportProcessorTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/OldArtifactReportProcessorTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessorTest.java [deleted file]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessorTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessorTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessorTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessorTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/AbstractChecksumArtifactReporterTestCase.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumArtifactReporterTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/DefaultArtifactReporterTest.java [new file with mode: 0644]
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessorTest.xml [deleted file]
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/OldArtifactReportProcessorTest.xml [deleted file]
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessorTest.xml [deleted file]
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.xml [new file with mode: 0644]
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.xml [new file with mode: 0644]
archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.xml [new file with mode: 0644]

diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/AbstractReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/AbstractReportGroup.java
deleted file mode 100644 (file)
index d0bd7ce..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.model.Model;
-
-import java.util.Iterator;
-import java.util.Map;
-
-/**
- * Basic functionality for all report groups.
- */
-public abstract class AbstractReportGroup
-    implements ReportGroup
-{
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.reporting.ArtifactReportProcessor"
-     */
-    private Map artifactReports;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.reporting.MetadataReportProcessor"
-     */
-    private Map metadataReports;
-
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase )
-    {
-        for ( Iterator i = artifactReports.entrySet().iterator(); i.hasNext(); )
-        {
-            Map.Entry entry = (Map.Entry) i.next();
-
-            if ( includeReport( (String) entry.getKey() ) )
-            {
-                ArtifactReportProcessor report = (ArtifactReportProcessor) entry.getValue();
-
-                report.processArtifact( artifact, model, reportingDatabase );
-            }
-        }
-    }
-
-    public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
-                                 ReportingDatabase reportingDatabase )
-    {
-        for ( Iterator i = metadataReports.entrySet().iterator(); i.hasNext(); )
-        {
-            Map.Entry entry = (Map.Entry) i.next();
-
-            if ( includeReport( (String) entry.getKey() ) )
-            {
-                MetadataReportProcessor report = (MetadataReportProcessor) entry.getValue();
-
-                report.processMetadata( repositoryMetadata, repository, reportingDatabase );
-            }
-        }
-    }
-
-    public String toString()
-    {
-        return getName();
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ArtifactReportProcessor.java
deleted file mode 100644 (file)
index 8667821..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.model.Model;
-
-/**
- * This interface will be called by the main system for each artifact as it is discovered. This is how each of the
- * different types of reports are implemented.
- */
-public interface ArtifactReportProcessor
-{
-    String ROLE = ArtifactReportProcessor.class.getName();
-
-    void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter );
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/BadMetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/BadMetadataReportProcessor.java
deleted file mode 100644 (file)
index dfc5190..0000000
+++ /dev/null
@@ -1,344 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.layer.RepositoryQueryLayer;
-import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.Plugin;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.codehaus.plexus.util.FileUtils;
-import org.apache.commons.lang.StringUtils;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * This class will report on bad metadata files.  These include invalid version declarations and incomplete version
- * information inside the metadata file.  Plugin metadata will be checked for validity of the latest plugin artifacts.
- *
- * @plexus.component role="org.apache.maven.archiva.reporting.MetadataReportProcessor" role-hint="bad-metadata"
- */
-public class BadMetadataReportProcessor
-    implements MetadataReportProcessor
-{
-    /**
-     * @plexus.requirement
-     */
-    private ArtifactFactory artifactFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
-
-    private static final String ROLE_HINT = "bad-metadata";
-
-    /**
-     * Process the metadata encountered in the repository and report all errors found, if any.
-     *
-     * @param metadata   the metadata to be processed.
-     * @param repository the repository where the metadata was encountered
-     * @param reporter   the ReportingDatabase to receive processing results
-     */
-    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                 ReportingDatabase reporter )
-    {
-        if ( metadata.storedInGroupDirectory() )
-        {
-            try
-            {
-                checkPluginMetadata( metadata, repository, reporter );
-            }
-            catch ( IOException e )
-            {
-                addWarning( reporter, metadata, null, "Error getting plugin artifact directories versions: " + e );
-            }
-        }
-        else
-        {
-            Versioning versioning = metadata.getMetadata().getVersioning();
-            boolean found = false;
-            if ( versioning != null )
-            {
-                String lastUpdated = versioning.getLastUpdated();
-                if ( lastUpdated != null && lastUpdated.length() != 0 )
-                {
-                    found = true;
-                }
-            }
-            if ( !found )
-            {
-                addFailure( reporter, metadata, "missing-last-updated",
-                            "Missing lastUpdated element inside the metadata." );
-            }
-
-            if ( metadata.storedInArtifactVersionDirectory() )
-            {
-                checkSnapshotMetadata( metadata, repository, reporter );
-            }
-            else
-            {
-                checkMetadataVersions( metadata, repository, reporter );
-
-                try
-                {
-                    checkRepositoryVersions( metadata, repository, reporter );
-                }
-                catch ( IOException e )
-                {
-                    String reason = "Error getting plugin artifact directories versions: " + e;
-                    addWarning( reporter, metadata, null, reason );
-                }
-            }
-        }
-    }
-
-    private static void addWarning( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
-    {
-        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addWarning( metadata, ROLE_HINT, problem, reason );
-    }
-
-    /**
-     * Method for processing a GroupRepositoryMetadata
-     *
-     * @param metadata   the metadata to be processed.
-     * @param repository the repository where the metadata was encountered
-     * @param reporter   the ReportingDatabase to receive processing results
-     */
-    private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                      ReportingDatabase reporter )
-        throws IOException
-    {
-        File metadataDir =
-            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
-        List pluginDirs = getArtifactIdFiles( metadataDir );
-
-        Map prefixes = new HashMap();
-        for ( Iterator plugins = metadata.getMetadata().getPlugins().iterator(); plugins.hasNext(); )
-        {
-            Plugin plugin = (Plugin) plugins.next();
-
-            String artifactId = plugin.getArtifactId();
-            if ( artifactId == null || artifactId.length() == 0 )
-            {
-                addFailure( reporter, metadata, "missing-artifact-id:" + plugin.getPrefix(),
-                            "Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
-            }
-
-            String prefix = plugin.getPrefix();
-            if ( prefix == null || prefix.length() == 0 )
-            {
-                addFailure( reporter, metadata, "missing-plugin-prefix:" + artifactId,
-                            "Missing or empty plugin prefix for artifactId " + artifactId + "." );
-            }
-            else
-            {
-                if ( prefixes.containsKey( prefix ) )
-                {
-                    addFailure( reporter, metadata, "duplicate-plugin-prefix:" + prefix,
-                                "Duplicate plugin prefix found: " + prefix + "." );
-                }
-                else
-                {
-                    prefixes.put( prefix, plugin );
-                }
-            }
-
-            if ( artifactId != null && artifactId.length() > 0 )
-            {
-                File pluginDir = new File( metadataDir, artifactId );
-                if ( !pluginDirs.contains( pluginDir ) )
-                {
-                    addFailure( reporter, metadata, "missing-plugin-from-repository:" + artifactId,
-                                "Metadata plugin " + artifactId + " not found in the repository" );
-                }
-                else
-                {
-                    pluginDirs.remove( pluginDir );
-                }
-            }
-        }
-
-        if ( pluginDirs.size() > 0 )
-        {
-            for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
-            {
-                File plugin = (File) plugins.next();
-                addFailure( reporter, metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " +
-                    plugin.getName() + " is present in the repository but " + "missing in the metadata." );
-            }
-        }
-    }
-
-    /**
-     * Method for processing a SnapshotArtifactRepository
-     *
-     * @param metadata   the metadata to be processed.
-     * @param repository the repository where the metadata was encountered
-     * @param reporter   the ReportingDatabase to receive processing results
-     */
-    private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                        ReportingDatabase reporter )
-    {
-        RepositoryQueryLayer repositoryQueryLayer =
-            repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
-
-        Versioning versioning = metadata.getMetadata().getVersioning();
-        if ( versioning != null )
-        {
-            Snapshot snapshot = versioning.getSnapshot();
-
-            String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION,
-                                                  snapshot.getTimestamp() + "-" + snapshot.getBuildNumber() );
-            Artifact artifact =
-                artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
-            artifact.isSnapshot(); // trigger baseVersion correction
-
-            if ( !repositoryQueryLayer.containsArtifact( artifact ) )
-            {
-                addFailure( reporter, metadata, "missing-snapshot-artifact-from-repository:" + version,
-                            "Snapshot artifact " + version + " does not exist." );
-            }
-        }
-    }
-
-    /**
-     * Method for validating the versions declared inside an ArtifactRepositoryMetadata
-     *
-     * @param metadata   the metadata to be processed.
-     * @param repository the repository where the metadata was encountered
-     * @param reporter   the ReportingDatabase to receive processing results
-     */
-    private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
-                                        ReportingDatabase reporter )
-    {
-        RepositoryQueryLayer repositoryQueryLayer =
-            repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
-
-        Versioning versioning = metadata.getMetadata().getVersioning();
-        if ( versioning != null )
-        {
-            for ( Iterator versions = versioning.getVersions().iterator(); versions.hasNext(); )
-            {
-                String version = (String) versions.next();
-
-                Artifact artifact =
-                    artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
-
-                if ( !repositoryQueryLayer.containsArtifact( artifact ) )
-                {
-                    addFailure( reporter, metadata, "missing-artifact-from-repository:" + version, "Artifact version " +
-                        version + " is present in metadata but " + "missing in the repository." );
-                }
-            }
-        }
-    }
-
-    /**
-     * Searches the artifact repository directory for all versions and verifies that all of them are listed in the
-     * ArtifactRepositoryMetadata
-     *
-     * @param metadata   the metadata to be processed.
-     * @param repository the repository where the metadata was encountered
-     * @param reporter   the ReportingDatabase to receive processing results
-     * @throws java.io.IOException if there is a problem reading from the file system
-     */
-    private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
-                                          ReportingDatabase reporter )
-        throws IOException
-    {
-        Versioning versioning = metadata.getMetadata().getVersioning();
-        List metadataVersions = versioning != null ? versioning.getVersions() : Collections.EMPTY_LIST;
-        File versionsDir =
-            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
-
-        // TODO: I don't know how this condition can happen, but it was seen on the main repository.
-        // Avoid hard failure
-        if ( versionsDir.exists() )
-        {
-            List versions = FileUtils.getFileNames( versionsDir, "*/*.pom", null, false );
-            for ( Iterator i = versions.iterator(); i.hasNext(); )
-            {
-                File path = new File( (String) i.next() );
-                String version = path.getParentFile().getName();
-                if ( !metadataVersions.contains( version ) )
-                {
-                    addFailure( reporter, metadata, "missing-artifact-from-metadata:" + version, "Artifact version " +
-                        version + " found in the repository but " + "missing in the metadata." );
-                }
-            }
-        }
-        else
-        {
-            addFailure( reporter, metadata, null, "Metadata's directory did not exist: " + versionsDir );
-        }
-    }
-
-    /**
-     * Used to gather artifactIds from a groupId directory.
-     *
-     * @param groupIdDir the directory of the group
-     * @return the list of artifact ID File objects for each directory
-     * @throws IOException if there was a failure to read the directories
-     */
-    private List getArtifactIdFiles( File groupIdDir )
-        throws IOException
-    {
-        List artifactIdFiles = new ArrayList();
-
-        File[] files = groupIdDir.listFiles();
-        if ( files != null )
-        {
-            for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
-            {
-                File artifactDir = (File) i.next();
-
-                if ( artifactDir.isDirectory() )
-                {
-                    List versions = FileUtils.getFileNames( artifactDir, "*/*.pom", null, false );
-                    if ( versions.size() > 0 )
-                    {
-                        artifactIdFiles.add( artifactDir );
-                    }
-                }
-            }
-        }
-
-        return artifactIdFiles;
-    }
-
-    private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
-    {
-        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( metadata, ROLE_HINT, problem, reason );
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ChecksumArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ChecksumArtifactReportProcessor.java
deleted file mode 100644 (file)
index 245ccfd..0000000
+++ /dev/null
@@ -1,101 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-import org.codehaus.plexus.digest.Digester;
-import org.codehaus.plexus.digest.DigesterException;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * This class reports invalid and mismatched checksums of artifacts and metadata files.
- * It validates MD5 and SHA-1 checksums.
- *
- * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="checksum"
- */
-public class ChecksumArtifactReportProcessor
-    implements ArtifactReportProcessor
-{
-    /**
-     * @plexus.requirement role-hint="sha1"
-     */
-    private Digester sha1Digester;
-
-    /**
-     * @plexus.requirement role-hint="md5"
-     */
-    private Digester md5Digester;
-
-    private static final String ROLE_HINT = "checksum";
-
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
-    {
-        ArtifactRepository repository = artifact.getRepository();
-
-        if ( !"file".equals( repository.getProtocol() ) )
-        {
-            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
-        }
-
-        //check if checksum files exist
-        String path = repository.pathOf( artifact );
-        File file = new File( repository.getBasedir(), path );
-
-        // TODO: make md5 configurable
-//        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
-        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
-    }
-
-    private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
-                                 ReportingDatabase reporter, Artifact artifact )
-    {
-        File checksumFile = new File( repository.getBasedir(), path );
-        if ( checksumFile.exists() )
-        {
-            try
-            {
-                digester.verify( file, FileUtils.fileRead( checksumFile ) );
-            }
-            catch ( DigesterException e )
-            {
-                addFailure( reporter, artifact, "checksum-wrong", e.getMessage() );
-            }
-            catch ( IOException e )
-            {
-                addFailure( reporter, artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
-            }
-        }
-        else
-        {
-            addFailure( reporter, artifact, "checksum-missing",
-                        digester.getAlgorithm() + " checksum file does not exist." );
-        }
-    }
-
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
-    {
-        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ChecksumMetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ChecksumMetadataReportProcessor.java
deleted file mode 100644 (file)
index 0361d99..0000000
+++ /dev/null
@@ -1,104 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.codehaus.plexus.digest.Digester;
-import org.codehaus.plexus.digest.DigesterException;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * This class reports invalid and mismatched checksums of artifacts and metadata files.
- * It validates MD5 and SHA-1 checksums.
- *
- * @plexus.component role="org.apache.maven.archiva.reporting.MetadataReportProcessor" role-hint="checksum-metadata"
- */
-public class ChecksumMetadataReportProcessor
-    implements MetadataReportProcessor
-{
-    /**
-     * @plexus.requirement role-hint="sha1"
-     */
-    private Digester sha1Digester;
-
-    /**
-     * @plexus.requirement role-hint="md5"
-     */
-    private Digester md5Digester;
-
-    private static final String ROLE_HINT = "checksum-metadata";
-
-    /**
-     * Validate the checksums of the metadata. Get the metadata file from the
-     * repository then validate the checksum.
-     */
-    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
-                                 ReportingDatabase reporter )
-    {
-        if ( !"file".equals( repository.getProtocol() ) )
-        {
-            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
-        }
-
-        //check if checksum files exist
-        String path = repository.pathOfRemoteRepositoryMetadata( metadata );
-        File file = new File( repository.getBasedir(), path );
-
-        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
-        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );
-    }
-
-    private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
-                                 ReportingDatabase reporter, RepositoryMetadata metadata )
-    {
-        File checksumFile = new File( repository.getBasedir(), path );
-        if ( checksumFile.exists() )
-        {
-            try
-            {
-                digester.verify( file, FileUtils.fileRead( checksumFile ) );
-            }
-            catch ( DigesterException e )
-            {
-                addFailure( reporter, metadata, "checksum-wrong", e.getMessage() );
-            }
-            catch ( IOException e )
-            {
-                addFailure( reporter, metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
-            }
-        }
-        else
-        {
-            addFailure( reporter, metadata, "checksum-missing",
-                        digester.getAlgorithm() + " checksum file does not exist." );
-        }
-    }
-
-    private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
-                                    String reason )
-    {
-        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( metadata, ROLE_HINT, problem, reason );
-    }
-
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportExecutor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportExecutor.java
deleted file mode 100644 (file)
index 81256c6..0000000
+++ /dev/null
@@ -1,240 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.InvalidArtifactRTException;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-import org.apache.maven.model.Model;
-import org.apache.maven.project.MavenProject;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.apache.maven.project.ProjectBuildingException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Report executor implementation.
- *
- * @todo should the report set be limitable by configuration?
- * @plexus.component
- */
-public class DefaultReportExecutor
-    extends AbstractLogEnabled
-    implements ReportExecutor
-{
-    /**
-     * @plexus.requirement
-     */
-    private MavenProjectBuilder projectBuilder;
-
-    /**
-     * @plexus.requirement
-     */
-    private ReportingStore reportingStore;
-
-    /**
-     * @plexus.requirement
-     */
-    private ArtifactFactory artifactFactory;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
-     */
-    private Map artifactDiscoverers;
-
-    /**
-     * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
-     */
-    private Map metadataDiscoverers;
-
-    private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
-    public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
-        for ( Iterator i = metadata.iterator(); i.hasNext(); )
-        {
-            RepositoryMetadata repositoryMetadata = (RepositoryMetadata) i.next();
-
-            File file =
-                new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
-            reporter.cleanMetadata( repositoryMetadata, file.lastModified() );
-
-            reportGroup.processMetadata( repositoryMetadata, repository, reporter );
-        }
-
-        reportingStore.storeReports( reporter, repository );
-    }
-
-    public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
-        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
-        {
-            Artifact artifact = (Artifact) i.next();
-
-            Model model = null;
-            try
-            {
-                Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
-                                                                              artifact.getArtifactId(),
-                                                                              artifact.getVersion() );
-                MavenProject project =
-                    projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
-
-                model = project.getModel();
-            }
-            catch ( InvalidArtifactRTException e )
-            {
-                reporter.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
-            }
-            catch ( ProjectBuildingException e )
-            {
-                reporter.addWarning( artifact, null, null, "Error reading project model: " + e );
-            }
-
-            reporter.removeArtifact( artifact );
-
-            reportGroup.processArtifact( artifact, model, reporter );
-        }
-
-        reportingStore.storeReports( reporter, repository );
-    }
-
-    public ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException
-    {
-        getLogger().debug(
-            "Reading previous report database " + reportGroup.getName() + " from repository " + repository.getId() );
-        return reportingStore.getReportsFromStore( repository, reportGroup );
-    }
-
-    public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
-                            ArtifactFilter filter )
-        throws DiscovererException, ReportingStoreException
-    {
-        // Flush (as in toilet, not store) the report database
-        ReportingDatabase database = getReportDatabase( repository, reportGroup );
-        database.clear();
-
-        // Discovery process
-        String layoutProperty = getRepositoryLayout( repository.getLayout() );
-        ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
-
-        // Save some memory by not tracking paths we won't use
-        // TODO: Plexus CDC should be able to inject this configuration
-        discoverer.setTrackOmittedPaths( false );
-
-        List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
-        if ( !artifacts.isEmpty() )
-        {
-            getLogger().info( "Discovered " + artifacts.size() + " artifacts" );
-
-            // Work through these in batches, then flush the project cache.
-            for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
-            {
-                int end = j + ARTIFACT_BUFFER_SIZE;
-                List currentArtifacts = artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
-                // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
-                // run the reports.
-                runArtifactReports( reportGroup, currentArtifacts, repository );
-
-                // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
-                // around that. TODO: remove when it is configurable
-                flushProjectBuilderCacheHack();
-            }
-        }
-
-        MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
-        List metadata =
-            metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
-
-        if ( !metadata.isEmpty() )
-        {
-            getLogger().info( "Discovered " + metadata.size() + " metadata files" );
-
-            // run the reports
-            runMetadataReports( reportGroup, metadata, repository );
-        }
-    }
-
-    private String getRepositoryLayout( ArtifactRepositoryLayout layout )
-    {
-        // gross limitation that there is no reverse lookup of the hint for the layout.
-        if ( layout.getClass().equals( DefaultRepositoryLayout.class ) )
-        {
-            return "default";
-        }
-        else if ( layout.getClass().equals( LegacyRepositoryLayout.class ) )
-        {
-            return "legacy";
-        }
-        else
-        {
-            throw new IllegalArgumentException( "Unknown layout: " + layout );
-        }
-    }
-
-    private void flushProjectBuilderCacheHack()
-    {
-        try
-        {
-            if ( projectBuilder != null )
-            {
-                java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
-                f.setAccessible( true );
-                Map cache = (Map) f.get( projectBuilder );
-                cache.clear();
-
-                f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
-                f.setAccessible( true );
-                cache = (Map) f.get( projectBuilder );
-                cache.clear();
-            }
-        }
-        catch ( NoSuchFieldException e )
-        {
-            throw new RuntimeException( e );
-        }
-        catch ( IllegalAccessException e )
-        {
-            throw new RuntimeException( e );
-        }
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportGroup.java
deleted file mode 100644 (file)
index 3f80c28..0000000
+++ /dev/null
@@ -1,67 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-/**
- * The default report set, for repository health.
- *
- * @plexus.component role="org.apache.maven.archiva.reporting.ReportGroup" role-hint="health"
- * @todo could these report groups be assembled dynamically by configuration rather than as explicit components? eg, reportGroup.addReport( ARP ), reportGroup.addReport( MRP )
- */
-public class DefaultReportGroup
-    extends AbstractReportGroup
-{
-    /**
-     * Role hints of the reports to include in this set.
-     */
-    private static final Map reports = new LinkedHashMap();
-
-    static
-    {
-        reports.put( "checksum", "Checksum Problems" );
-        reports.put( "dependency", "Dependency Problems" );
-        // TODO re-enable duplicate, once a way to populate the index is determined!
-//        reports.put( "duplicate", "Duplicate Artifact Problems" );
-        reports.put( "invalid-pom", "POM Problems" );
-        reports.put( "bad-metadata", "Metadata Problems" );
-        reports.put( "checksum-metadata", "Metadata Checksum Problems" );
-        reports.put( "artifact-location", "Artifact Location Problems" );
-    }
-
-    public boolean includeReport( String key )
-    {
-        return reports.containsKey( key );
-    }
-
-    public Map getReports()
-    {
-        return reports;
-    }
-
-    public String getName()
-    {
-        return "Repository Health";
-    }
-
-    public String getFilename()
-    {
-        return "health-report.xml";
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportingStore.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DefaultReportingStore.java
deleted file mode 100644 (file)
index 768fd5e..0000000
+++ /dev/null
@@ -1,138 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Reader;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Writer;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.IOUtil;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Load and store the reports. No synchronization is used, but it is unnecessary as the old object
- * can continue to be used.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
- * @todo support other implementations than XML file
- * @plexus.component
- */
-public class DefaultReportingStore
-    extends AbstractLogEnabled
-    implements ReportingStore
-{
-    /**
-     * The cached reports for given repositories.
-     */
-    private Map/*<String,ReportingDatabase>*/ reports = new HashMap();
-
-    public ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException
-    {
-        String key = getKey( repository, reportGroup );
-        ReportingDatabase database = (ReportingDatabase) reports.get( key );
-
-        if ( database == null )
-        {
-            ReportingXpp3Reader reader = new ReportingXpp3Reader();
-
-            File file = getReportFilename( repository, reportGroup );
-
-            FileReader fileReader = null;
-            try
-            {
-                fileReader = new FileReader( file );
-            }
-            catch ( FileNotFoundException e )
-            {
-                database = new ReportingDatabase( reportGroup, repository );
-            }
-
-            if ( database == null )
-            {
-                getLogger().info( "Reading report database from " + file );
-                try
-                {
-                    Reporting reporting = reader.read( fileReader, false );
-                    database = new ReportingDatabase( reportGroup, reporting, repository );
-                }
-                catch ( IOException e )
-                {
-                    throw new ReportingStoreException( e.getMessage(), e );
-                }
-                catch ( XmlPullParserException e )
-                {
-                    throw new ReportingStoreException( e.getMessage(), e );
-                }
-                finally
-                {
-                    IOUtil.close( fileReader );
-                }
-            }
-
-            reports.put( key, database );
-        }
-        return database;
-    }
-
-    private static String getKey( ArtifactRepository repository, ReportGroup reportGroup )
-    {
-        return repository.getId() + "/" + reportGroup.getFilename();
-    }
-
-    private static File getReportFilename( ArtifactRepository repository, ReportGroup reportGroup )
-    {
-        return new File( repository.getBasedir(), ".reports/" + reportGroup.getFilename() );
-    }
-
-    public void storeReports( ReportingDatabase database, ArtifactRepository repository )
-        throws ReportingStoreException
-    {
-        database.updateTimings();
-
-        ReportingXpp3Writer writer = new ReportingXpp3Writer();
-
-        File file = getReportFilename( repository, database.getReportGroup() );
-        getLogger().info( "Writing reports to " + file );
-        FileWriter fileWriter = null;
-        try
-        {
-            file.getParentFile().mkdirs();
-
-            fileWriter = new FileWriter( file );
-            writer.write( fileWriter, database.getReporting() );
-        }
-        catch ( IOException e )
-        {
-            throw new ReportingStoreException( e.getMessage(), e );
-        }
-        finally
-        {
-            IOUtil.close( fileWriter );
-        }
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DependencyArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DependencyArtifactReportProcessor.java
deleted file mode 100644 (file)
index c62b2f1..0000000
+++ /dev/null
@@ -1,158 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.layer.RepositoryQueryLayer;
-import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.apache.maven.model.Dependency;
-import org.apache.maven.model.Model;
-
-import java.text.MessageFormat;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="dependency"
- */
-public class DependencyArtifactReportProcessor
-    implements ArtifactReportProcessor
-{
-    /**
-     * @plexus.requirement
-     */
-    private ArtifactFactory artifactFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private RepositoryQueryLayerFactory layerFactory;
-
-    private static final String POM = "pom";
-
-    private static final String ROLE_HINT = "dependency";
-
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
-    {
-        RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( artifact.getRepository() );
-        if ( !queryLayer.containsArtifact( artifact ) )
-        {
-            // TODO: is this even possible?
-            addFailure( reporter, artifact, "missing-artifact", "Artifact does not exist in the repository" );
-        }
-
-        if ( model != null && POM.equals( artifact.getType() ) )
-        {
-            List dependencies = model.getDependencies();
-            processDependencies( dependencies, reporter, queryLayer, artifact );
-        }
-    }
-
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
-    {
-        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
-    }
-
-    private void processDependencies( List dependencies, ReportingDatabase reporter,
-                                      RepositoryQueryLayer repositoryQueryLayer, Artifact sourceArtifact )
-    {
-        if ( dependencies.size() > 0 )
-        {
-            Iterator iterator = dependencies.iterator();
-            while ( iterator.hasNext() )
-            {
-                Dependency dependency = (Dependency) iterator.next();
-
-                try
-                {
-                    Artifact artifact = createArtifact( dependency );
-
-                    // TODO: handle ranges properly. We should instead be mapping out all the artifacts in the
-                    // repository and mapping out the graph
-
-                    if ( artifact.getVersion() == null )
-                    {
-                        // it was a range, for now presume it exists
-                        continue;
-                    }
-
-                    if ( !repositoryQueryLayer.containsArtifact( artifact ) )
-                    {
-                        String reason = MessageFormat.format(
-                            "Artifact''s dependency {0} does not exist in the repository",
-                            new String[]{getDependencyString( dependency )} );
-                        addFailure( reporter, sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ),
-                                    reason );
-                    }
-                }
-                catch ( InvalidVersionSpecificationException e )
-                {
-                    String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}",
-                                                          new String[]{getDependencyString( dependency ),
-                                                              dependency.getVersion()} );
-                    addFailure( reporter, sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
-                }
-            }
-        }
-    }
-
-    private String getDependencyKey( Dependency dependency )
-    {
-        String str = dependency.getGroupId();
-        str += ":" + dependency.getArtifactId();
-        str += ":" + dependency.getVersion();
-        str += ":" + dependency.getType();
-        if ( dependency.getClassifier() != null )
-        {
-            str += ":" + dependency.getClassifier();
-        }
-        return str;
-    }
-
-    static String getDependencyString( Dependency dependency )
-    {
-        String str = "(group=" + dependency.getGroupId();
-        str += ", artifact=" + dependency.getArtifactId();
-        str += ", version=" + dependency.getVersion();
-        str += ", type=" + dependency.getType();
-        if ( dependency.getClassifier() != null )
-        {
-            str += ", classifier=" + dependency.getClassifier();
-        }
-        str += ")";
-        return str;
-    }
-
-    private Artifact createArtifact( Dependency dependency )
-        throws InvalidVersionSpecificationException
-    {
-        VersionRange spec = VersionRange.createFromVersionSpec( dependency.getVersion() );
-
-        if ( spec == null )
-        {
-            throw new InvalidVersionSpecificationException( "Dependency version was null" );
-        }
-
-        return artifactFactory.createDependencyArtifact( dependency.getGroupId(), dependency.getArtifactId(), spec,
-                                                         dependency.getType(), dependency.getClassifier(),
-                                                         dependency.getScope() );
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessor.java
deleted file mode 100644 (file)
index a05062d..0000000
+++ /dev/null
@@ -1,131 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
-import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
-import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-import org.codehaus.plexus.digest.Digester;
-import org.codehaus.plexus.digest.DigesterException;
-
-import java.io.File;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Validates an artifact file for duplicates within the same groupId based from what's available in a repository index.
- *
- * @author Edwin Punzalan
- * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="duplicate"
- */
-public class DuplicateArtifactFileReportProcessor
-    implements ArtifactReportProcessor
-{
-    /**
-     * @plexus.requirement role-hint="md5"
-     */
-    private Digester digester;
-
-    /**
-     * @plexus.requirement
-     */
-    private RepositoryArtifactIndexFactory indexFactory;
-
-    /**
-     * @plexus.configuration
-     */
-    private String indexDirectory;
-
-    private static final String ROLE_HINT = "duplicate";
-
-    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
-    {
-        ArtifactRepository repository = artifact.getRepository();
-        if ( artifact.getFile() != null )
-        {
-            RepositoryArtifactIndex index = indexFactory.createStandardIndex( new File( indexDirectory ) );
-
-            String checksum = null;
-            try
-            {
-                checksum = digester.calc( artifact.getFile() );
-            }
-            catch ( DigesterException e )
-            {
-                addWarning( reporter, artifact, null,
-                            "Unable to generate checksum for " + artifact.getFile() + ": " + e );
-            }
-
-            if ( checksum != null )
-            {
-                try
-                {
-                    List results = index.search( new LuceneQuery(
-                        new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
-
-                    if ( !results.isEmpty() )
-                    {
-                        for ( Iterator i = results.iterator(); i.hasNext(); )
-                        {
-                            StandardArtifactIndexRecord result = (StandardArtifactIndexRecord) i.next();
-
-                            //make sure it is not the same artifact
-                            if ( !result.getFilename().equals( repository.pathOf( artifact ) ) )
-                            {
-                                //report only duplicates from the same groupId
-                                String groupId = artifact.getGroupId();
-                                if ( groupId.equals( result.getGroupId() ) )
-                                {
-                                    addFailure( reporter, artifact, "duplicate",
-                                                 "Found duplicate for " + artifact.getId() );
-                                }
-                            }
-                        }
-                    }
-                }
-                catch ( RepositoryIndexSearchException e )
-                {
-                    addWarning( reporter, artifact, null, "Failed to search in index" + e );
-                }
-            }
-        }
-        else
-        {
-            addWarning( reporter, artifact, null, "Artifact file is null" );
-        }
-    }
-
-    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
-    {
-        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
-    }
-
-    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
-    {
-        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
-        reporter.addWarning( artifact, ROLE_HINT, problem, reason );
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/InvalidPomArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/InvalidPomArtifactReportProcessor.java
deleted file mode 100644 (file)
index 3ed9246..0000000
+++ /dev/null
@@ -1,99 +0,0 @@
package org.apache.maven.archiva.reporting;

/*
 * Copyright 2005-2006 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;

/**
 * This class validates well-formedness of pom xml file. A "pom" typed artifact is
 * located on disk and parsed; a missing file, an I/O error, or malformed XML is
 * recorded as a failure in the reporting database. Artifacts of other types are ignored.
 *
 * @todo nice to have this a specific, tested report - however it is likely to double up with project building exceptions from IndexerTask. Resolve [!]
 * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="invalid-pom"
 */
public class InvalidPomArtifactReportProcessor
    implements ArtifactReportProcessor
{
    /** Role hint under which failures are recorded in the reporting database. */
    private static final String ROLE_HINT = "invalid-pom";

    /**
     * Validate the POM backing the given artifact, reporting a failure when the file
     * is missing or is not well-formed XML. Only file-based repositories are supported.
     *
     * @param artifact The pom xml file to be validated, passed as an artifact object.
     * @param model    The model associated with the artifact (not used by this processor).
     * @param reporter The artifact reporter object.
     * @throws UnsupportedOperationException if the artifact's repository is not file based
     */
    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
    {
        ArtifactRepository repository = artifact.getRepository();

        if ( !"file".equals( repository.getProtocol() ) )
        {
            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
            throw new UnsupportedOperationException(
                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
        }

        // Use equalsIgnoreCase rather than getType().toLowerCase() so the comparison is not
        // subject to locale-sensitive case mapping (e.g. the Turkish dotless-i problem).
        if ( "pom".equalsIgnoreCase( artifact.getType() ) )
        {
            File f = new File( repository.getBasedir(), repository.pathOf( artifact ) );

            if ( !f.exists() )
            {
                addFailure( reporter, artifact, "pom-missing", "POM not found." );
            }
            else
            {
                Reader reader = null;

                MavenXpp3Reader pomReader = new MavenXpp3Reader();

                try
                {
                    // NOTE(review): FileReader decodes with the platform default charset,
                    // while POMs are normally UTF-8 — consider an explicit charset. TODO confirm.
                    reader = new FileReader( f );
                    pomReader.read( reader );
                }
                catch ( XmlPullParserException e )
                {
                    addFailure( reporter, artifact, "pom-parse-exception",
                                "The pom xml file is not well-formed. Error while parsing: " + e.getMessage() );
                }
                catch ( IOException e )
                {
                    addFailure( reporter, artifact, "pom-io-exception",
                                "Error while reading the pom xml file: " + e.getMessage() );
                }
                finally
                {
                    // Always release the file handle, even when parsing fails.
                    IOUtil.close( reader );
                }
            }
        }
    }

    /**
     * Record a failure for the given artifact under this processor's role hint.
     *
     * @param reporter the reporting database that collects results
     * @param artifact the artifact the failure applies to
     * @param problem  the problem identifier
     * @param reason   a human-readable explanation of the failure
     */
    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
    {
        // TODO: reason could eventually become an i18n key derived from the processor and the problem ID
        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
    }
}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/LocationArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/LocationArtifactReportProcessor.java
deleted file mode 100644 (file)
index 2b03592..0000000
+++ /dev/null
@@ -1,244 +0,0 @@
package org.apache.maven.archiva.reporting;

/*
 * Copyright 2005-2006 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.apache.maven.project.MavenProjectBuilder;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

/**
 * Validate the location of the artifact based on the values indicated
 * in its pom (both the pom packaged with the artifact &amp; the pom in the
 * file system).
 *
 * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="artifact-location"
 */
public class LocationArtifactReportProcessor
    implements ArtifactReportProcessor
{
    /**
     * @plexus.requirement
     */
    private ArtifactFactory artifactFactory;

    // TODO: share with other code with the same
    /** Artifact types that are JAR-structured archives and may embed a META-INF pom.xml. */
    private static final Set JAR_FILE_TYPES =
        new HashSet( Arrays.asList( new String[]{"jar", "war", "par", "ejb", "ear", "rar", "sar"} ) );

    /**
     * @plexus.requirement
     */
    private MavenProjectBuilder projectBuilder;

    private static final String POM = "pom";

    /** Role hint under which results are recorded in the reporting database. */
    private static final String ROLE_HINT = "artifact-location";

    /**
     * Check whether the artifact is in its proper location. The location of the artifact
     * is validated first against the groupId, artifactId and versionId in the specified model
     * object (pom in the file system). Then unpack the artifact (jar file) and get the model (pom)
     * included in the package. If a model exists inside the package, then check if the artifact's
     * location is valid based on the location specified in the pom. Check if the both the location
     * specified in the file system pom and in the pom included in the package is the same.
     *
     * @param artifact the artifact whose repository location is validated
     * @param model    the file-system POM associated with the artifact (may be null)
     * @param reporter the reporting database failures are recorded in
     * @throws UnsupportedOperationException if the artifact's repository is not file based
     */
    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
    {
        ArtifactRepository repository = artifact.getRepository();

        if ( !"file".equals( repository.getProtocol() ) )
        {
            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
            throw new UnsupportedOperationException(
                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
        }

        adjustDistributionArtifactHandler( artifact );

        String artifactPath = repository.pathOf( artifact );

        if ( model != null )
        {
            // only check if it is a standalone POM, or an artifact other than a POM
            // ie, don't check the location of the POM for another artifact matches that of the artifact
            if ( !POM.equals( artifact.getType() ) || POM.equals( model.getPackaging() ) )
            {
                //check if the artifact is located in its proper location based on the info
                //specified in the model object/pom
                Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(),
                                                                                       model.getArtifactId(),
                                                                                       model.getVersion(),
                                                                                       artifact.getType(),
                                                                                       artifact.getClassifier() );

                adjustDistributionArtifactHandler( modelArtifact );
                String modelPath = repository.pathOf( modelArtifact );
                if ( !modelPath.equals( artifactPath ) )
                {
                    addFailure( reporter, artifact, "repository-pom-location",
                                "The artifact is out of place. It does not match the specified location in the repository pom: " +
                                    modelPath );
                }
            }
        }

        // get the location of the artifact itself
        File file = new File( repository.getBasedir(), artifactPath );

        if ( file.exists() )
        {
            if ( JAR_FILE_TYPES.contains( artifact.getType() ) )
            {
                //unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
                //check if the pom is included in the package
                Model extractedModel = readArtifactModel( file, artifact, reporter );

                if ( extractedModel != null )
                {
                    Artifact extractedArtifact = artifactFactory.createBuildArtifact( extractedModel.getGroupId(),
                                                                                      extractedModel.getArtifactId(),
                                                                                      extractedModel.getVersion(),
                                                                                      extractedModel.getPackaging() );
                    if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
                    {
                        addFailure( reporter, artifact, "packaged-pom-location",
                                    "The artifact is out of place. It does not match the specified location in the packaged pom." );
                    }
                }
            }
        }
        else
        {
            addFailure( reporter, artifact, "missing-artifact", "The artifact file [" + file + "] cannot be found for metadata." );
        }
    }

    /**
     * Record a failure for the given artifact under this processor's role hint.
     *
     * @param reporter the reporting database that collects results
     * @param artifact the artifact the failure applies to
     * @param problem  the problem identifier
     * @param reason   a human-readable explanation of the failure
     */
    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
    {
        // TODO: reason could eventually become an i18n key derived from the processor and the problem ID
        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
    }

    /**
     * Give distribution archives a handler with the correct file extension so that
     * {@code repository.pathOf()} computes the right path for them.
     *
     * @param artifact the artifact whose handler may be replaced (mutated in place)
     */
    private static void adjustDistributionArtifactHandler( Artifact artifact )
    {
        // need to tweak these as they aren't currently in the known type converters. TODO - add them in Maven
        if ( "distribution-zip".equals( artifact.getType() ) )
        {
            artifact.setArtifactHandler( new DefaultArtifactHandler( "zip" ) );
        }
        else if ( "distribution-tgz".equals( artifact.getType() ) )
        {
            artifact.setArtifactHandler( new DefaultArtifactHandler( "tar.gz" ) );
        }
    }

    /**
     * Extract and parse the {@code META-INF/maven/<groupId>/<artifactId>/pom.xml}
     * entry from the artifact's archive, if present.
     *
     * @param file     the archive file to open
     * @param artifact the artifact, used to locate the packaged POM entry
     * @param reporter the reporting database warnings are recorded in
     * @return the parsed model with groupId/version inherited from its parent where
     *         absent, or null if the entry is missing or could not be read/parsed
     */
    private Model readArtifactModel( File file, Artifact artifact, ReportingDatabase reporter )
    {
        Model model = null;

        JarFile jar = null;
        try
        {
            jar = new JarFile( file );

            //Get the entry and its input stream.
            JarEntry entry = jar.getJarEntry(
                "META-INF/maven/" + artifact.getGroupId() + "/" + artifact.getArtifactId() + "/pom.xml" );

            // If the entry is not null, extract it.
            if ( entry != null )
            {
                model = readModel( jar.getInputStream( entry ) );

                // Fill in coordinates inherited from the parent so the path comparison
                // in processArtifact() has complete information.
                if ( model.getGroupId() == null )
                {
                    model.setGroupId( model.getParent().getGroupId() );
                }
                if ( model.getVersion() == null )
                {
                    model.setVersion( model.getParent().getVersion() );
                }
            }
        }
        catch ( IOException e )
        {
            addWarning( reporter, artifact, "Unable to read artifact to extract model: " + e );
        }
        catch ( XmlPullParserException e )
        {
            addWarning( reporter, artifact, "Unable to parse extracted model: " + e );
        }
        finally
        {
            if ( jar != null )
            {
                //noinspection UnusedCatchParameter
                try
                {
                    jar.close();
                }
                catch ( IOException e )
                {
                    // ignore
                }
            }
        }
        return model;
    }

    /**
     * Record a warning for the given artifact under this processor's role hint.
     *
     * @param reporter the reporting database that collects results
     * @param artifact the artifact the warning applies to
     * @param reason   a human-readable explanation of the warning
     */
    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String reason )
    {
        // TODO: reason could eventually become an i18n key derived from the processor and the problem ID
        reporter.addWarning( artifact, ROLE_HINT, null, reason );
    }

    /**
     * Parse a POM from the given stream, closing it when done.
     *
     * @param entryStream the stream containing the pom.xml bytes
     * @return the parsed model
     * @throws IOException            if the stream cannot be read
     * @throws XmlPullParserException if the content is not well-formed XML
     */
    private Model readModel( InputStream entryStream )
        throws IOException, XmlPullParserException
    {
        // Decode explicitly as UTF-8 (the XML default encoding) instead of relying on the
        // platform default charset, which corrupts non-ASCII POM content on some platforms.
        Reader isReader = new InputStreamReader( entryStream, "UTF-8" );

        Model model;
        try
        {
            MavenXpp3Reader pomReader = new MavenXpp3Reader();
            model = pomReader.read( isReader );
        }
        finally
        {
            IOUtil.close( isReader );
        }
        return model;
    }

}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/MetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/MetadataReportProcessor.java
deleted file mode 100644 (file)
index d043a26..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * This interface is called by the main system for each piece of metadata as it is discovered.
- */
-public interface MetadataReportProcessor
-{
-    String ROLE = MetadataReportProcessor.class.getName();
-
-    void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ReportingDatabase reporter );
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldArtifactReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldArtifactReportGroup.java
deleted file mode 100644 (file)
index d41dc96..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
package org.apache.maven.archiva.reporting;

/*
 * Copyright 2005-2006 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.LinkedHashMap;
import java.util.Map;

/**
 * The report set for finding old artifacts (both snapshot and release)
 *
 * @plexus.component role="org.apache.maven.archiva.reporting.ReportGroup" role-hint="old-artifact"
 */
public class OldArtifactReportGroup
    extends AbstractReportGroup
{
    /**
     * Role hints of the reports to include in this set, mapped to their display names.
     * Insertion order is preserved so reports render in a stable order.
     *
     * @todo implement these report processors!
     */
    private static final Map reports = createReports();

    /** Build the fixed mapping of report role hints to display names. */
    private static Map createReports()
    {
        Map map = new LinkedHashMap();
        map.put( "old-artifact", "Old Artifacts" );
        map.put( "old-snapshot-artifact", "Old Snapshot Artifacts" );
        return map;
    }

    public boolean includeReport( String key )
    {
        return reports.containsKey( key );
    }

    public Map getReports()
    {
        return reports;
    }

    public String getFilename()
    {
        return "old-artifacts-report.xml";
    }

    public String getName()
    {
        return "Old Artifacts";
    }
}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldArtifactReportProcessor.java
deleted file mode 100644 (file)
index ffd6791..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
package org.apache.maven.archiva.reporting;

/*
 * Copyright 2005-2006 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;

import java.io.File;

/**
 * Find artifacts in the repository that are considered old.
 *
 * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="old-artifact"
 * @todo make this configurable from the web interface
 */
public class OldArtifactReportProcessor
    implements ArtifactReportProcessor
{
    /** Role hint under which notices are recorded in the reporting database. */
    private static final String ROLE_HINT = "old-artifact";

    /**
     * The maximum age of an artifact before it is reported old, specified in seconds. The default is 1 year.
     *
     * @plexus.configuration default-value="31536000"
     */
    private int maxAge;

    /**
     * Record a notice when the artifact file's last-modified time is older than {@link #maxAge}.
     *
     * @param artifact the artifact to examine
     * @param model    the model associated with the artifact (not used by this processor)
     * @param reporter the reporting database notices are recorded in
     * @throws UnsupportedOperationException if the artifact's repository is not file based
     * @throws IllegalStateException         if the artifact file does not exist
     */
    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
    {
        ArtifactRepository repository = artifact.getRepository();

        if ( !"file".equals( repository.getProtocol() ) )
        {
            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
            throw new UnsupportedOperationException(
                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
        }

        adjustDistributionArtifactHandler( artifact );

        String artifactPath = repository.pathOf( artifact );

        //get the location of the artifact itself
        File file = new File( repository.getBasedir(), artifactPath );

        if ( file.exists() )
        {
            // Multiply with a long literal: maxAge * 1000 in int arithmetic overflows for the
            // default value (31536000 * 1000 > Integer.MAX_VALUE), corrupting the comparison.
            if ( System.currentTimeMillis() - file.lastModified() > maxAge * 1000L )
            {
                // TODO: reason could eventually become an i18n key derived from the processor and the problem ID
                reporter.addNotice( artifact, ROLE_HINT, "old-artifact",
                                    "The artifact is older than the maximum age of " + maxAge + " seconds." );
            }
        }
        else
        {
            throw new IllegalStateException( "Couldn't find artifact " + file );
        }
    }

    /**
     * Give distribution archives a handler with the correct file extension so that
     * {@code repository.pathOf()} computes the right path for them.
     *
     * @param artifact the artifact whose handler may be replaced (mutated in place)
     */
    private static void adjustDistributionArtifactHandler( Artifact artifact )
    {
        // need to tweak these as they aren't currently in the known type converters. TODO - add them in Maven
        if ( "distribution-zip".equals( artifact.getType() ) )
        {
            artifact.setArtifactHandler( new DefaultArtifactHandler( "zip" ) );
        }
        else if ( "distribution-tgz".equals( artifact.getType() ) )
        {
            artifact.setArtifactHandler( new DefaultArtifactHandler( "tar.gz" ) );
        }
    }
}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessor.java
deleted file mode 100644 (file)
index b6e10ee..0000000
+++ /dev/null
@@ -1,179 +0,0 @@
package org.apache.maven.archiva.reporting;

/*
 * Copyright 2005-2006 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;

import java.io.File;
import java.io.FilenameFilter;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Matcher;

/**
 * Find snapshot artifacts in the repository that are considered old.
 *
 * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="old-snapshot-artifact"
 * @todo make this configurable from the web interface
 */
public class OldSnapshotArtifactReportProcessor
    implements ArtifactReportProcessor
{
    /** Role hint under which notices are recorded in the reporting database. */
    private static final String ROLE_HINT = "old-snapshot-artifact";

    /**
     * The maximum age of an artifact before it is reported old, specified in seconds. The default is 1 year.
     *
     * @plexus.configuration default-value="31536000"
     */
    private int maxAge;

    /**
     * The maximum number of snapshots to retain within a given version. The default is 0, which keeps all snapshots
     * that are within the age limits.
     *
     * @plexus.configuration default-value="0"
     */
    private int maxSnapshots;

    /**
     * Record a notice for timestamped snapshot artifacts that are either older than
     * {@link #maxAge} or outside the {@link #maxSnapshots} most recent builds.
     * Non-snapshot artifacts and non-timestamped snapshot versions are ignored.
     *
     * @param artifact the artifact to examine (final: captured by the filename filter below)
     * @param model    the model associated with the artifact (not used by this processor)
     * @param reporter the reporting database notices are recorded in
     * @throws UnsupportedOperationException if the artifact's repository is not file based
     * @throws IllegalStateException         if the artifact file does not exist
     */
    public void processArtifact( final Artifact artifact, Model model, ReportingDatabase reporter )
    {
        ArtifactRepository repository = artifact.getRepository();

        if ( !"file".equals( repository.getProtocol() ) )
        {
            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
            throw new UnsupportedOperationException(
                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
        }

        adjustDistributionArtifactHandler( artifact );

        String artifactPath = repository.pathOf( artifact );

        //get the location of the artifact itself
        File file = new File( repository.getBasedir(), artifactPath );

        if ( file.exists() )
        {
            if ( artifact.isSnapshot() )
            {
                Matcher m = Artifact.VERSION_FILE_PATTERN.matcher( artifact.getVersion() );
                if ( m.matches() )
                {
                    long timestamp;
                    try
                    {
                        timestamp = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).parse( m.group( 2 ) ).getTime();
                    }
                    catch ( ParseException e )
                    {
                        throw new IllegalStateException(
                            "Shouldn't match timestamp pattern and not be able to parse it: " + m.group( 2 ) );
                    }

                    // Multiply with a long literal: maxAge * 1000 in int arithmetic overflows for
                    // the default value (31536000 * 1000 > Integer.MAX_VALUE), corrupting the check.
                    if ( System.currentTimeMillis() - timestamp > maxAge * 1000L )
                    {
                        addNotice( reporter, artifact, "snapshot-expired-time",
                                   "The artifact is older than the maximum age of " + maxAge + " seconds." );
                    }
                    else if ( maxSnapshots > 0 )
                    {
                        // Sibling snapshot files for the same artifact id and extension.
                        File[] files = file.getParentFile().listFiles( new FilenameFilter()
                        {
                            public boolean accept( File file, String string )
                            {
                                return string.startsWith( artifact.getArtifactId() + "-" ) &&
                                    string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
                            }
                        } );

                        List/*<Integer>*/ buildNumbers = new ArrayList();
                        Integer currentBuild = null;
                        for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
                        {
                            File f = (File) i.next();

                            // trim to version
                            int startIndex = artifact.getArtifactId().length() + 1;
                            int extensionLength = artifact.getArtifactHandler().getExtension().length() + 1;
                            int endIndex = f.getName().length() - extensionLength;
                            String name = f.getName().substring( startIndex, endIndex );

                            Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( name );

                            if ( matcher.matches() )
                            {
                                Integer buildNumber = Integer.valueOf( matcher.group( 3 ) );

                                buildNumbers.add( buildNumber );
                                if ( name.equals( artifact.getVersion() ) )
                                {
                                    currentBuild = buildNumber;
                                }
                            }
                        }

                        // Prune back to expired build numbers: drop the maxSnapshots highest
                        // (most recent) builds; anything left over is considered expired.
                        Collections.sort( buildNumbers );
                        for ( int i = 0; i < maxSnapshots && !buildNumbers.isEmpty(); i++ )
                        {
                            buildNumbers.remove( buildNumbers.size() - 1 );
                        }

                        if ( buildNumbers.contains( currentBuild ) )
                        {
                            addNotice( reporter, artifact, "snapshot-expired-count",
                                       "The artifact is older than the maximum number of retained snapshot builds." );
                        }
                    }
                }
            }
        }
        else
        {
            throw new IllegalStateException( "Couldn't find artifact " + file );
        }
    }

    /**
     * Record a notice for the given artifact under this processor's role hint.
     *
     * @param reporter the reporting database that collects results
     * @param artifact the artifact the notice applies to
     * @param problem  the problem identifier
     * @param reason   a human-readable explanation of the notice
     */
    private static void addNotice( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
    {
        // TODO: reason could eventually become an i18n key derived from the processor and the problem ID
        reporter.addNotice( artifact, ROLE_HINT, problem, reason );
    }

    /**
     * Give distribution archives a handler with the correct file extension so that
     * {@code repository.pathOf()} computes the right path for them.
     *
     * @param artifact the artifact whose handler may be replaced (mutated in place)
     */
    private static void adjustDistributionArtifactHandler( Artifact artifact )
    {
        // need to tweak these as they aren't currently in the known type converters. TODO - add them in Maven
        if ( "distribution-zip".equals( artifact.getType() ) )
        {
            artifact.setArtifactHandler( new DefaultArtifactHandler( "zip" ) );
        }
        else if ( "distribution-tgz".equals( artifact.getType() ) )
        {
            artifact.setArtifactHandler( new DefaultArtifactHandler( "tar.gz" ) );
        }
    }
}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportExecutor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportExecutor.java
deleted file mode 100644 (file)
index 07da5bc..0000000
+++ /dev/null
@@ -1,82 +0,0 @@
package org.apache.maven.archiva.reporting;

/*
 * Copyright 2005-2006 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.maven.archiva.discoverer.DiscovererException;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;

import java.util.List;

/**
 * Executes a report or report group.
 */
public interface ReportExecutor
{
    /**
     * Plexus component role name.
     */
    String ROLE = ReportExecutor.class.getName();

    // Note: interface methods are implicitly public; the redundant modifier is
    // omitted uniformly (the original mixed `public` and unmodified declarations).

    /**
     * Run reports on a set of metadata.
     *
     * @param reportGroup the report set to run
     * @param metadata    the RepositoryMetadata objects to report on
     * @param repository  the repository that they come from
     * @throws ReportingStoreException if there is a problem reading/writing the report database
     */
    void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
        throws ReportingStoreException;

    /**
     * Run reports on a set of artifacts.
     *
     * @param reportGroup the report set to run
     * @param artifacts   the Artifact objects to report on
     * @param repository  the repository that they come from
     * @throws ReportingStoreException if there is a problem reading/writing the report database
     */
    void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
        throws ReportingStoreException;

    /**
     * Get the report database in use for a given repository.
     *
     * @param repository  the repository
     * @param reportGroup the report set to run
     * @return the report database
     * @throws ReportingStoreException if there is a problem reading the report database
     */
    ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
        throws ReportingStoreException;

    /**
     * Run the artifact and metadata reports for the repository. The artifacts and metadata will be discovered.
     *
     * @param repository          the repository to run from
     * @param blacklistedPatterns the patterns to exclude during discovery
     * @param filter              the filter to use during discovery to get a consistent list of artifacts
     * @param reportGroup         the report set to run
     * @throws ReportingStoreException if there is a problem reading/writing the report database
     * @throws org.apache.maven.archiva.discoverer.DiscovererException
     *                                 if there is a problem finding the artifacts and metadata to report on
     */
    void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
                     ArtifactFilter filter )
        throws DiscovererException, ReportingStoreException;
}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportGroup.java
deleted file mode 100644 (file)
index 50a5a4b..0000000
+++ /dev/null
@@ -1,85 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.model.Model;
-
-import java.util.Map;
-
-/**
- * A grouping or report processors for execution as a visible report from the web interface - eg, "health",
- * "old artifacts", etc.
- */
-public interface ReportGroup
-{
-    /**
-     * Plexus component role.
-     */
-    String ROLE = ReportGroup.class.getName();
-
-    /**
-     * Run any artifact related reports in the report set.
-     *
-     * @param artifact          the artifact to process
-     * @param model             the POM associated with the artifact to process
-     * @param reportingDatabase the report database to store results in
-     */
-    void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase );
-
-    /**
-     * Run any metadata related reports in the report set.
-     *
-     * @param repositoryMetadata the metadata to process
-     * @param repository         the repository the metadata is located in
-     * @param reportingDatabase  the report database to store results in
-     */
-    void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
-                          ReportingDatabase reportingDatabase );
-
-    /**
-     * Whether a report with the given role hint is included in this report set.
-     *
-     * @param key the report role hint.
-     * @return whether the report is included
-     */
-    boolean includeReport( String key );
-
-    /**
-     * Get the report processors in this set. The map is keyed by the report's role hint, and the value is it's
-     * display name.
-     *
-     * @return the reports
-     */
-    Map getReports();
-
-    /**
-     * Get the user-friendly name of this report.
-     *
-     * @return the report name
-     */
-    String getName();
-
-    /**
-     * Get the filename of the reports within the repository's reports directory.
-     *
-     * @return the filename
-     */
-    String getFilename();
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingDatabase.java
deleted file mode 100644 (file)
index 78ae3c3..0000000
+++ /dev/null
@@ -1,608 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * @todo i18n, including message formatting and parameterisation
- */
-public class ReportingDatabase
-{
-    private final Reporting reporting;
-
-    private Map artifactMap;
-
-    private Map metadataMap;
-
-    private int numFailures;
-
-    private int numWarnings;
-
-    private ArtifactRepository repository;
-
-    private boolean inProgress;
-
-    private long startTime;
-
-    private final ReportGroup reportGroup;
-
-    private Set metadataWithProblems;
-
-    private Map filteredDatabases = new HashMap();
-
-    private int numNotices;
-
-    public ReportingDatabase( ReportGroup reportGroup )
-    {
-        this( reportGroup, new Reporting() );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, Reporting reporting )
-    {
-        this( reportGroup, reporting, null );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, ArtifactRepository repository )
-    {
-        this( reportGroup, new Reporting(), repository );
-    }
-
-    public ReportingDatabase( ReportGroup reportGroup, Reporting reporting, ArtifactRepository repository )
-    {
-        this.reportGroup = reportGroup;
-
-        this.reporting = reporting;
-
-        this.repository = repository;
-
-        initArtifactMap();
-
-        initMetadataMap();
-    }
-
-    public void addFailure( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        results.addFailure( createResult( processor, problem, reason ) );
-        numFailures++;
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addFailure( artifact, processor, problem, reason );
-        }
-    }
-
-    public void addNotice( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        results.addNotice( createResult( processor, problem, reason ) );
-        numNotices++;
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addNotice( artifact, processor, problem, reason );
-        }
-    }
-
-    public void addWarning( Artifact artifact, String processor, String problem, String reason )
-    {
-        ArtifactResults results = getArtifactResults( artifact );
-        results.addWarning( createResult( processor, problem, reason ) );
-        numWarnings++;
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addWarning( artifact, processor, problem, reason );
-        }
-    }
-
-    private ArtifactResults getArtifactResults( Artifact artifact )
-    {
-        return getArtifactResults( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
-                                   artifact.getType(), artifact.getClassifier() );
-    }
-
-    private ArtifactResults getArtifactResults( String groupId, String artifactId, String version, String type,
-                                                String classifier )
-    {
-        Map artifactMap = this.artifactMap;
-
-        String key = getArtifactKey( groupId, artifactId, version, type, classifier );
-        ArtifactResults results = (ArtifactResults) artifactMap.get( key );
-        if ( results == null )
-        {
-            results = new ArtifactResults();
-            results.setArtifactId( artifactId );
-            results.setClassifier( classifier );
-            results.setGroupId( groupId );
-            results.setType( type );
-            results.setVersion( version );
-
-            artifactMap.put( key, results );
-            reporting.getArtifacts().add( results );
-        }
-
-        return results;
-    }
-
-    private void initArtifactMap()
-    {
-        Map map = new HashMap();
-        for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
-        {
-            ArtifactResults result = (ArtifactResults) i.next();
-
-            String key = getArtifactKey( result.getGroupId(), result.getArtifactId(), result.getVersion(),
-                                         result.getType(), result.getClassifier() );
-            map.put( key, result );
-
-            numFailures += result.getFailures().size();
-            numWarnings += result.getWarnings().size();
-            numNotices += result.getNotices().size();
-        }
-        artifactMap = map;
-    }
-
-    private static String getArtifactKey( String groupId, String artifactId, String version, String type,
-                                          String classifier )
-    {
-        return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
-    }
-
-    private static Result createResult( String processor, String problem, String reason )
-    {
-        Result result = new Result();
-        result.setProcessor( processor );
-        result.setProblem( problem );
-        result.setReason( reason );
-        return result;
-    }
-
-    public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        results.addFailure( createResult( processor, problem, reason ) );
-        numFailures++;
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addFailure( metadata, processor, problem, reason );
-        }
-    }
-
-    public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        results.addWarning( createResult( processor, problem, reason ) );
-        numWarnings++;
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addWarning( metadata, processor, problem, reason );
-        }
-    }
-
-    public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
-    {
-        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
-        if ( !metadataWithProblems.contains( results ) )
-        {
-            metadataWithProblems.add( results );
-        }
-        results.addNotice( createResult( processor, problem, reason ) );
-        numNotices++;
-        updateTimings();
-
-        if ( filteredDatabases.containsKey( problem ) )
-        {
-            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
-            reportingDatabase.addNotice( metadata, processor, problem, reason );
-        }
-    }
-
-    public Set getMetadataWithProblems()
-    {
-        return metadataWithProblems;
-    }
-
-    private void initMetadataMap()
-    {
-        Map map = new HashMap();
-        Set problems = new LinkedHashSet();
-
-        for ( Iterator i = reporting.getMetadata().iterator(); i.hasNext(); )
-        {
-            MetadataResults result = (MetadataResults) i.next();
-
-            String key = getMetadataKey( result.getGroupId(), result.getArtifactId(), result.getVersion() );
-
-            map.put( key, result );
-
-            numFailures += result.getFailures().size();
-            numWarnings += result.getWarnings().size();
-            numNotices += result.getNotices().size();
-
-            if ( !result.getFailures().isEmpty() || !result.getWarnings().isEmpty() || !result.getNotices().isEmpty() )
-            {
-                problems.add( result );
-            }
-        }
-        metadataMap = map;
-        metadataWithProblems = problems;
-    }
-
-    private static String getMetadataKey( String groupId, String artifactId, String version )
-    {
-        return groupId + ":" + artifactId + ":" + version;
-    }
-
-    public int getNumFailures()
-    {
-        return numFailures;
-    }
-
-    public int getNumWarnings()
-    {
-        return numWarnings;
-    }
-
-    public Reporting getReporting()
-    {
-        return reporting;
-    }
-
-    public Iterator getArtifactIterator()
-    {
-        return reporting.getArtifacts().iterator();
-    }
-
-    public Iterator getMetadataIterator()
-    {
-        return reporting.getMetadata().iterator();
-    }
-
-    public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp )
-    {
-        String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
-        Map map = metadataMap;
-        MetadataResults results = (MetadataResults) map.get( key );
-        return results != null && results.getLastModified() >= timestamp;
-    }
-
-    /**
-     * Make sure the metadata record exists, but remove any previous reports in preparation for adding new ones.
-     *
-     * @param metadata     the metadata
-     * @param lastModified the modification time of the file being tracked
-     */
-    public void cleanMetadata( RepositoryMetadata metadata, long lastModified )
-    {
-        MetadataResults results = getMetadataResults( metadata, lastModified );
-
-        results.setLastModified( lastModified );
-
-        numFailures -= results.getFailures().size();
-        results.getFailures().clear();
-
-        numWarnings -= results.getWarnings().size();
-        results.getWarnings().clear();
-
-        numNotices -= results.getWarnings().size();
-        results.getNotices().clear();
-
-        metadataWithProblems.remove( results );
-    }
-
-    private MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified )
-    {
-        return getMetadataResults( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion(),
-                                   lastModified );
-    }
-
-    private MetadataResults getMetadataResults( String groupId, String artifactId, String baseVersion,
-                                                long lastModified )
-    {
-        String key = getMetadataKey( groupId, artifactId, baseVersion );
-        Map metadataMap = this.metadataMap;
-        MetadataResults results = (MetadataResults) metadataMap.get( key );
-        if ( results == null )
-        {
-            results = new MetadataResults();
-            results.setArtifactId( artifactId );
-            results.setGroupId( groupId );
-            results.setVersion( baseVersion );
-            results.setLastModified( lastModified );
-
-            metadataMap.put( key, results );
-            reporting.getMetadata().add( results );
-        }
-        return results;
-    }
-
-    public void removeArtifact( Artifact artifact )
-    {
-        Map map = artifactMap;
-
-        String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
-                                     artifact.getType(), artifact.getClassifier() );
-        ArtifactResults results = (ArtifactResults) map.get( key );
-        if ( results != null )
-        {
-            for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
-            {
-                if ( results.equals( i.next() ) )
-                {
-                    i.remove();
-                }
-            }
-
-            numFailures -= results.getFailures().size();
-            numWarnings -= results.getWarnings().size();
-            numNotices -= results.getNotices().size();
-
-            map.remove( key );
-        }
-    }
-
-    public ArtifactRepository getRepository()
-    {
-        return repository;
-    }
-
-    public boolean isInProgress()
-    {
-        return inProgress;
-    }
-
-    public void setInProgress( boolean inProgress )
-    {
-        this.inProgress = inProgress;
-
-        if ( inProgress )
-        {
-            startTime = System.currentTimeMillis();
-        }
-    }
-
-    public void clear()
-    {
-        // clear the values rather than destroy the instance so that the "inProgress" indicator is in tact.
-        numWarnings = 0;
-        numNotices = 0;
-        numFailures = 0;
-
-        artifactMap.clear();
-        metadataMap.clear();
-        metadataWithProblems.clear();
-        filteredDatabases.clear();
-
-        reporting.getArtifacts().clear();
-        reporting.getMetadata().clear();
-
-        updateTimings();
-    }
-
-    public void setStartTime( long startTime )
-    {
-        this.startTime = startTime;
-    }
-
-    public long getStartTime()
-    {
-        return startTime;
-    }
-
-    public void updateTimings()
-    {
-        long startTime = getStartTime();
-        Date endTime = new Date();
-        if ( startTime > 0 )
-        {
-            getReporting().setExecutionTime( endTime.getTime() - startTime );
-        }
-        getReporting().setLastModified( endTime.getTime() );
-    }
-
-    public ReportGroup getReportGroup()
-    {
-        return reportGroup;
-    }
-
-    public ReportingDatabase getFilteredDatabase( String filter )
-    {
-        ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( filter );
-
-        if ( reportingDatabase == null )
-        {
-            reportingDatabase = new ReportingDatabase( reportGroup, repository );
-
-            Reporting reporting = reportingDatabase.getReporting();
-            reporting.setExecutionTime( this.reporting.getExecutionTime() );
-            reporting.setLastModified( this.reporting.getLastModified() );
-
-            for ( Iterator i = this.reporting.getArtifacts().iterator(); i.hasNext(); )
-            {
-                ArtifactResults results = (ArtifactResults) i.next();
-                ArtifactResults targetResults = null;
-                for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addFailure( result );
-                        reportingDatabase.numFailures++;
-                    }
-                }
-                for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addWarning( result );
-                        reportingDatabase.numWarnings++;
-                    }
-                }
-                for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createArtifactResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addNotice( result );
-                        reportingDatabase.numNotices++;
-                    }
-                }
-            }
-            for ( Iterator i = this.reporting.getMetadata().iterator(); i.hasNext(); )
-            {
-                MetadataResults results = (MetadataResults) i.next();
-                MetadataResults targetResults = null;
-                for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addFailure( result );
-                        reportingDatabase.numFailures++;
-                    }
-                }
-                for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addWarning( result );
-                        reportingDatabase.numWarnings++;
-                    }
-                }
-                for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
-                {
-                    Result result = (Result) j.next();
-
-                    if ( filter.equals( result.getProcessor() ) )
-                    {
-                        if ( targetResults == null )
-                        {
-                            // lazily create so it is not added unless it has to be
-                            targetResults = createMetadataResults( reportingDatabase, results );
-                        }
-
-                        targetResults.addNotice( result );
-                        reportingDatabase.numNotices++;
-                    }
-                }
-            }
-
-            filteredDatabases.put( filter, reportingDatabase );
-        }
-
-        return reportingDatabase;
-    }
-
-    private static MetadataResults createMetadataResults( ReportingDatabase reportingDatabase, MetadataResults results )
-    {
-        MetadataResults targetResults = reportingDatabase.getMetadataResults( results.getGroupId(),
-                                                                              results.getArtifactId(),
-                                                                              results.getVersion(),
-                                                                              results.getLastModified() );
-        reportingDatabase.metadataWithProblems.add( targetResults );
-        return targetResults;
-    }
-
-    private static ArtifactResults createArtifactResults( ReportingDatabase reportingDatabase, ArtifactResults results )
-    {
-        return reportingDatabase.getArtifactResults( results.getGroupId(), results.getArtifactId(),
-                                                     results.getVersion(), results.getType(), results.getClassifier() );
-    }
-
-    public int getNumNotices()
-    {
-        return numNotices;
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingMetadataFilter.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingMetadataFilter.java
deleted file mode 100644 (file)
index 9ef876c..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Implementation of a reporting filter. Artifacts already in the database are ignored.
- */
-public class ReportingMetadataFilter
-    implements MetadataFilter
-{
-    private final ReportingDatabase reporter;
-
-    public ReportingMetadataFilter( ReportingDatabase reporter )
-    {
-        this.reporter = reporter;
-    }
-
-    public boolean include( RepositoryMetadata metadata, long timestamp )
-    {
-        return !reporter.isMetadataUpToDate( metadata, timestamp );
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingStore.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingStore.java
deleted file mode 100644 (file)
index 8f52a5c..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- * A component for loading the reporting database into the model.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo this is something that could possibly be generalised into Modello.
- */
-public interface ReportingStore
-{
-    /**
-     * The Plexus role for the component.
-     */
-    String ROLE = ReportingStore.class.getName();
-
-    /**
-     * Get the reports from the store. A cached version may be used.
-     *
-     * @param repository  the repository to load the reports for
-     * @param reportGroup the report group to get the report for
-     * @return the reporting database
-     * @throws ReportingStoreException if there was a problem reading the store
-     */
-    ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
-        throws ReportingStoreException;
-
-    /**
-     * Save the reporting to the store.
-     *
-     * @param database   the reports to store
-     * @param repository the repositorry to store the reports in
-     * @throws ReportingStoreException if there was a problem writing the store
-     */
-    void storeReports( ReportingDatabase database, ArtifactRepository repository )
-        throws ReportingStoreException;
-
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingStoreException.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingStoreException.java
deleted file mode 100644 (file)
index 5e797e4..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Exception occurring using the reporting store.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ReportingStoreException
-    extends Exception
-{
-    public ReportingStoreException( String message )
-    {
-        super( message );
-    }
-
-    public ReportingStoreException( String message, Throwable e )
-    {
-        super( message, e );
-    }
-}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java
new file mode 100644 (file)
index 0000000..b009825
--- /dev/null
@@ -0,0 +1,609 @@
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.Reporting;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * @todo i18n, including message formatting and parameterisation
+ */
+public class ReportingDatabase
+{
+    // Underlying model object holding all artifact and metadata results; persisted by the store.
+    private final Reporting reporting;
+
+    // Lookup from "groupId:artifactId:version:type:classifier" key to ArtifactResults.
+    private Map artifactMap;
+
+    // Lookup from "groupId:artifactId:version" key to MetadataResults.
+    private Map metadataMap;
+
+    // Running counters, kept in sync with the results stored in 'reporting'.
+    private int numFailures;
+
+    private int numWarnings;
+
+    // Repository the reports relate to (may be null when constructed without one).
+    private ArtifactRepository repository;
+
+    // True while a report run is currently executing against this database.
+    private boolean inProgress;
+
+    // Start time of the current run, in milliseconds (see setInProgress / updateTimings).
+    private long startTime;
+
+    private final ReportGroup reportGroup;
+
+    // MetadataResults instances that have at least one failure, warning or notice.
+    private Set metadataWithProblems;
+
+    // Cache of filtered views of this database, keyed by filter (see getFilteredDatabase).
+    private Map filteredDatabases = new HashMap();
+
+    private int numNotices;
+
+    /**
+     * Create an empty database for the given report group, with no repository.
+     */
+    public ReportingDatabase( ReportGroup reportGroup )
+    {
+        this( reportGroup, new Reporting() );
+    }
+
+    /**
+     * Create a database wrapping previously loaded reporting results, with no repository.
+     */
+    public ReportingDatabase( ReportGroup reportGroup, Reporting reporting )
+    {
+        this( reportGroup, reporting, null );
+    }
+
+    /**
+     * Create an empty database for the given report group and repository.
+     */
+    public ReportingDatabase( ReportGroup reportGroup, ArtifactRepository repository )
+    {
+        this( reportGroup, new Reporting(), repository );
+    }
+
+    /**
+     * Create a database wrapping previously loaded results. Initialises the artifact and
+     * metadata lookup maps (and the failure/warning/notice counters) from the supplied model.
+     */
+    public ReportingDatabase( ReportGroup reportGroup, Reporting reporting, ArtifactRepository repository )
+    {
+        this.reportGroup = reportGroup;
+
+        this.reporting = reporting;
+
+        this.repository = repository;
+
+        initArtifactMap();
+
+        initMetadataMap();
+    }
+
+    /**
+     * Record a failure against the given artifact, bump the failure counter and timings,
+     * and forward the result to any cached filtered database.
+     */
+    public void addFailure( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        results.addFailure( createResult( processor, problem, reason ) );
+        numFailures++;
+        updateTimings();
+
+        // NOTE(review): filtered databases are cached under the filter (matched against the
+        // result's processor in getFilteredDatabase) but looked up here by 'problem' —
+        // confirm which key is intended.
+        if ( filteredDatabases.containsKey( problem ) )
+        {
+            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+            reportingDatabase.addFailure( artifact, processor, problem, reason );
+        }
+    }
+
+    /**
+     * Record an informational notice against the given artifact.
+     */
+    public void addNotice( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        results.addNotice( createResult( processor, problem, reason ) );
+        numNotices++;
+        updateTimings();
+
+        if ( filteredDatabases.containsKey( problem ) )
+        {
+            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+            reportingDatabase.addNotice( artifact, processor, problem, reason );
+        }
+    }
+
+    /**
+     * Record a warning against the given artifact.
+     */
+    public void addWarning( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        results.addWarning( createResult( processor, problem, reason ) );
+        numWarnings++;
+        updateTimings();
+
+        if ( filteredDatabases.containsKey( problem ) )
+        {
+            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+            reportingDatabase.addWarning( artifact, processor, problem, reason );
+        }
+    }
+
+    // Convenience overload: derive the lookup coordinates from the Artifact itself.
+    private ArtifactResults getArtifactResults( Artifact artifact )
+    {
+        return getArtifactResults( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
+                                   artifact.getType(), artifact.getClassifier() );
+    }
+
+    /**
+     * Get (or lazily create and register) the ArtifactResults for the given coordinates.
+     * New results are added to both the lookup map and the backing model.
+     */
+    private ArtifactResults getArtifactResults( String groupId, String artifactId, String version, String type,
+                                                String classifier )
+    {
+        Map artifactMap = this.artifactMap;
+
+        String key = getArtifactKey( groupId, artifactId, version, type, classifier );
+        ArtifactResults results = (ArtifactResults) artifactMap.get( key );
+        if ( results == null )
+        {
+            results = new ArtifactResults();
+            results.setArtifactId( artifactId );
+            results.setClassifier( classifier );
+            results.setGroupId( groupId );
+            results.setType( type );
+            results.setVersion( version );
+
+            artifactMap.put( key, results );
+            reporting.getArtifacts().add( results );
+        }
+
+        return results;
+    }
+
+    // Rebuild the key -> ArtifactResults map from the model, accumulating the counters.
+    private void initArtifactMap()
+    {
+        Map map = new HashMap();
+        for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
+        {
+            ArtifactResults result = (ArtifactResults) i.next();
+
+            String key = getArtifactKey( result.getGroupId(), result.getArtifactId(), result.getVersion(),
+                                         result.getType(), result.getClassifier() );
+            map.put( key, result );
+
+            numFailures += result.getFailures().size();
+            numWarnings += result.getWarnings().size();
+            numNotices += result.getNotices().size();
+        }
+        artifactMap = map;
+    }
+
+    // Canonical map key for an artifact: colon-joined coordinates.
+    private static String getArtifactKey( String groupId, String artifactId, String version, String type,
+                                          String classifier )
+    {
+        return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
+    }
+
+    // Build a Result model object carrying the processor, problem and reason strings.
+    private static Result createResult( String processor, String problem, String reason )
+    {
+        Result result = new Result();
+        result.setProcessor( processor );
+        result.setProblem( problem );
+        result.setReason( reason );
+        return result;
+    }
+
+    /**
+     * Record a failure against the given repository metadata; the results record is also
+     * tracked in the metadataWithProblems set.
+     */
+    public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
+        if ( !metadataWithProblems.contains( results ) )
+        {
+            metadataWithProblems.add( results );
+        }
+        results.addFailure( createResult( processor, problem, reason ) );
+        numFailures++;
+        updateTimings();
+
+        if ( filteredDatabases.containsKey( problem ) )
+        {
+            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+            reportingDatabase.addFailure( metadata, processor, problem, reason );
+        }
+    }
+
+    /**
+     * Record a warning against the given repository metadata.
+     */
+    public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
+        if ( !metadataWithProblems.contains( results ) )
+        {
+            metadataWithProblems.add( results );
+        }
+        results.addWarning( createResult( processor, problem, reason ) );
+        numWarnings++;
+        updateTimings();
+
+        if ( filteredDatabases.containsKey( problem ) )
+        {
+            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+            reportingDatabase.addWarning( metadata, processor, problem, reason );
+        }
+    }
+
+    /**
+     * Record an informational notice against the given repository metadata.
+     */
+    public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
+        if ( !metadataWithProblems.contains( results ) )
+        {
+            metadataWithProblems.add( results );
+        }
+        results.addNotice( createResult( processor, problem, reason ) );
+        numNotices++;
+        updateTimings();
+
+        if ( filteredDatabases.containsKey( problem ) )
+        {
+            ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+            reportingDatabase.addNotice( metadata, processor, problem, reason );
+        }
+    }
+
+    /**
+     * @return the set of MetadataResults that currently have failures, warnings or notices
+     */
+    public Set getMetadataWithProblems()
+    {
+        return metadataWithProblems;
+    }
+
+    // Rebuild the key -> MetadataResults map from the model, accumulate the counters, and
+    // collect (in insertion order) the records that carry problems.
+    private void initMetadataMap()
+    {
+        Map map = new HashMap();
+        Set problems = new LinkedHashSet();
+
+        for ( Iterator i = reporting.getMetadata().iterator(); i.hasNext(); )
+        {
+            MetadataResults result = (MetadataResults) i.next();
+
+            String key = getMetadataKey( result.getGroupId(), result.getArtifactId(), result.getVersion() );
+
+            map.put( key, result );
+
+            numFailures += result.getFailures().size();
+            numWarnings += result.getWarnings().size();
+            numNotices += result.getNotices().size();
+
+            if ( !result.getFailures().isEmpty() || !result.getWarnings().isEmpty() || !result.getNotices().isEmpty() )
+            {
+                problems.add( result );
+            }
+        }
+        metadataMap = map;
+        metadataWithProblems = problems;
+    }
+
+    // Canonical map key for repository metadata: colon-joined coordinates.
+    private static String getMetadataKey( String groupId, String artifactId, String version )
+    {
+        return groupId + ":" + artifactId + ":" + version;
+    }
+
+    /**
+     * @return the running count of failures across all artifact and metadata results
+     */
+    public int getNumFailures()
+    {
+        return numFailures;
+    }
+
+    /**
+     * @return the running count of warnings across all artifact and metadata results
+     */
+    public int getNumWarnings()
+    {
+        return numWarnings;
+    }
+
+    /**
+     * @return the backing reporting model
+     */
+    public Reporting getReporting()
+    {
+        return reporting;
+    }
+
+    // Iterate over all ArtifactResults in the backing model.
+    public Iterator getArtifactIterator()
+    {
+        return reporting.getArtifacts().iterator();
+    }
+
+    // Iterate over all MetadataResults in the backing model.
+    public Iterator getMetadataIterator()
+    {
+        return reporting.getMetadata().iterator();
+    }
+
+    /**
+     * Check whether the stored results for the given metadata are at least as recent as the
+     * supplied timestamp (i.e. no re-processing is needed).
+     */
+    public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp )
+    {
+        String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
+        Map map = metadataMap;
+        MetadataResults results = (MetadataResults) map.get( key );
+        return results != null && results.getLastModified() >= timestamp;
+    }
+
+    /**
+     * Make sure the metadata record exists, but remove any previous reports in preparation for adding new ones.
+     *
+     * @param metadata     the metadata
+     * @param lastModified the modification time of the file being tracked
+     */
+    public void cleanMetadata( RepositoryMetadata metadata, long lastModified )
+    {
+        MetadataResults results = getMetadataResults( metadata, lastModified );
+
+        results.setLastModified( lastModified );
+
+        numFailures -= results.getFailures().size();
+        results.getFailures().clear();
+
+        numWarnings -= results.getWarnings().size();
+        results.getWarnings().clear();
+
+        numNotices -= results.getWarnings().size();
+        results.getNotices().clear();
+
+        metadataWithProblems.remove( results );
+    }
+
+    // Convenience overload: derive the lookup coordinates from the metadata itself.
+    private MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified )
+    {
+        return getMetadataResults( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion(),
+                                   lastModified );
+    }
+
+    /**
+     * Get (or lazily create and register) the MetadataResults for the given coordinates.
+     * New results are added to both the lookup map and the backing model.
+     */
+    private MetadataResults getMetadataResults( String groupId, String artifactId, String baseVersion,
+                                                long lastModified )
+    {
+        String key = getMetadataKey( groupId, artifactId, baseVersion );
+        Map metadataMap = this.metadataMap;
+        MetadataResults results = (MetadataResults) metadataMap.get( key );
+        if ( results == null )
+        {
+            results = new MetadataResults();
+            results.setArtifactId( artifactId );
+            results.setGroupId( groupId );
+            results.setVersion( baseVersion );
+            results.setLastModified( lastModified );
+
+            metadataMap.put( key, results );
+            reporting.getMetadata().add( results );
+        }
+        return results;
+    }
+
+    /**
+     * Remove all stored results for the given artifact, from both the lookup map and the
+     * backing model, adjusting the counters accordingly. A no-op if none are stored.
+     */
+    public void removeArtifact( Artifact artifact )
+    {
+        Map map = artifactMap;
+
+        String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
+                                     artifact.getType(), artifact.getClassifier() );
+        ArtifactResults results = (ArtifactResults) map.get( key );
+        if ( results != null )
+        {
+            // Remove via the iterator to avoid ConcurrentModificationException on the model list.
+            for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
+            {
+                if ( results.equals( i.next() ) )
+                {
+                    i.remove();
+                }
+            }
+
+            numFailures -= results.getFailures().size();
+            numWarnings -= results.getWarnings().size();
+            numNotices -= results.getNotices().size();
+
+            map.remove( key );
+        }
+    }
+
+    /**
+     * @return the repository these reports relate to (may be null)
+     */
+    public ArtifactRepository getRepository()
+    {
+        return repository;
+    }
+
+    /**
+     * @return whether a report run is currently executing against this database
+     */
+    public boolean isInProgress()
+    {
+        return inProgress;
+    }
+
+    /**
+     * Mark a run as started or finished; starting a run records the current time as the
+     * run's start time.
+     */
+    public void setInProgress( boolean inProgress )
+    {
+        this.inProgress = inProgress;
+
+        if ( inProgress )
+        {
+            startTime = System.currentTimeMillis();
+        }
+    }
+
+    /**
+     * Discard all stored results and reset the counters, leaving the instance (and its
+     * "inProgress" state) reusable for a fresh run.
+     */
+    public void clear()
+    {
+        // clear the values rather than destroy the instance so that the "inProgress" indicator is intact.
+        numWarnings = 0;
+        numNotices = 0;
+        numFailures = 0;
+
+        artifactMap.clear();
+        metadataMap.clear();
+        metadataWithProblems.clear();
+        filteredDatabases.clear();
+
+        reporting.getArtifacts().clear();
+        reporting.getMetadata().clear();
+
+        updateTimings();
+    }
+
+    /**
+     * Set the start time of the current run, in milliseconds.
+     */
+    public void setStartTime( long startTime )
+    {
+        this.startTime = startTime;
+    }
+
+    /**
+     * @return the start time of the current run, in milliseconds (0 if never started)
+     */
+    public long getStartTime()
+    {
+        return startTime;
+    }
+
+    /**
+     * Refresh the model's execution-time and last-modified fields. Execution time is only
+     * recorded once a start time has been set.
+     */
+    public void updateTimings()
+    {
+        long startTime = getStartTime();
+        Date endTime = new Date();
+        if ( startTime > 0 )
+        {
+            getReporting().setExecutionTime( endTime.getTime() - startTime );
+        }
+        getReporting().setLastModified( endTime.getTime() );
+    }
+
+    /**
+     * @return the report group this database was built for
+     */
+    public ReportGroup getReportGroup()
+    {
+        return reportGroup;
+    }
+
+    /**
+     * Build (and cache) a view of this database containing only the results whose processor
+     * matches the given filter. The view is constructed once per filter and reused; its
+     * counters are incremented directly as matching results are copied across.
+     *
+     * @param filter the processor name to match against each result's processor
+     * @return the filtered database (never null)
+     */
+    public ReportingDatabase getFilteredDatabase( String filter )
+    {
+        ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( filter );
+
+        if ( reportingDatabase == null )
+        {
+            reportingDatabase = new ReportingDatabase( reportGroup, repository );
+
+            Reporting reporting = reportingDatabase.getReporting();
+            reporting.setExecutionTime( this.reporting.getExecutionTime() );
+            reporting.setLastModified( this.reporting.getLastModified() );
+
+            // Copy matching artifact results: failures, then warnings, then notices.
+            for ( Iterator i = this.reporting.getArtifacts().iterator(); i.hasNext(); )
+            {
+                ArtifactResults results = (ArtifactResults) i.next();
+                ArtifactResults targetResults = null;
+                for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
+                {
+                    Result result = (Result) j.next();
+
+                    if ( filter.equals( result.getProcessor() ) )
+                    {
+                        if ( targetResults == null )
+                        {
+                            // lazily create so it is not added unless it has to be
+                            targetResults = createArtifactResults( reportingDatabase, results );
+                        }
+
+                        targetResults.addFailure( result );
+                        reportingDatabase.numFailures++;
+                    }
+                }
+                for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
+                {
+                    Result result = (Result) j.next();
+
+                    if ( filter.equals( result.getProcessor() ) )
+                    {
+                        if ( targetResults == null )
+                        {
+                            // lazily create so it is not added unless it has to be
+                            targetResults = createArtifactResults( reportingDatabase, results );
+                        }
+
+                        targetResults.addWarning( result );
+                        reportingDatabase.numWarnings++;
+                    }
+                }
+                for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
+                {
+                    Result result = (Result) j.next();
+
+                    if ( filter.equals( result.getProcessor() ) )
+                    {
+                        if ( targetResults == null )
+                        {
+                            // lazily create so it is not added unless it has to be
+                            targetResults = createArtifactResults( reportingDatabase, results );
+                        }
+
+                        targetResults.addNotice( result );
+                        reportingDatabase.numNotices++;
+                    }
+                }
+            }
+            // Copy matching metadata results the same way.
+            for ( Iterator i = this.reporting.getMetadata().iterator(); i.hasNext(); )
+            {
+                MetadataResults results = (MetadataResults) i.next();
+                MetadataResults targetResults = null;
+                for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
+                {
+                    Result result = (Result) j.next();
+
+                    if ( filter.equals( result.getProcessor() ) )
+                    {
+                        if ( targetResults == null )
+                        {
+                            // lazily create so it is not added unless it has to be
+                            targetResults = createMetadataResults( reportingDatabase, results );
+                        }
+
+                        targetResults.addFailure( result );
+                        reportingDatabase.numFailures++;
+                    }
+                }
+                for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
+                {
+                    Result result = (Result) j.next();
+
+                    if ( filter.equals( result.getProcessor() ) )
+                    {
+                        if ( targetResults == null )
+                        {
+                            // lazily create so it is not added unless it has to be
+                            targetResults = createMetadataResults( reportingDatabase, results );
+                        }
+
+                        targetResults.addWarning( result );
+                        reportingDatabase.numWarnings++;
+                    }
+                }
+                for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
+                {
+                    Result result = (Result) j.next();
+
+                    if ( filter.equals( result.getProcessor() ) )
+                    {
+                        if ( targetResults == null )
+                        {
+                            // lazily create so it is not added unless it has to be
+                            targetResults = createMetadataResults( reportingDatabase, results );
+                        }
+
+                        targetResults.addNotice( result );
+                        reportingDatabase.numNotices++;
+                    }
+                }
+            }
+
+            filteredDatabases.put( filter, reportingDatabase );
+        }
+
+        return reportingDatabase;
+    }
+
+    // Register a MetadataResults in the target database mirroring the source record's
+    // coordinates, and track it as having problems.
+    private static MetadataResults createMetadataResults( ReportingDatabase reportingDatabase, MetadataResults results )
+    {
+        MetadataResults targetResults = reportingDatabase.getMetadataResults( results.getGroupId(),
+                                                                              results.getArtifactId(),
+                                                                              results.getVersion(),
+                                                                              results.getLastModified() );
+        reportingDatabase.metadataWithProblems.add( targetResults );
+        return targetResults;
+    }
+
+    // Register an ArtifactResults in the target database mirroring the source record's coordinates.
+    private static ArtifactResults createArtifactResults( ReportingDatabase reportingDatabase, ArtifactResults results )
+    {
+        return reportingDatabase.getArtifactResults( results.getGroupId(), results.getArtifactId(),
+                                                     results.getVersion(), results.getType(), results.getClassifier() );
+    }
+
+    /**
+     * @return the running count of notices across all artifact and metadata results
+     */
+    public int getNumNotices()
+    {
+        return numNotices;
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java
new file mode 100644 (file)
index 0000000..ca95295
--- /dev/null
@@ -0,0 +1,245 @@
+package org.apache.maven.archiva.reporting.executor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
+import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.executor.ReportExecutor;
+import org.apache.maven.archiva.reporting.store.ReportingStore;
+import org.apache.maven.archiva.reporting.store.ReportingStoreException;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.InvalidArtifactRTException;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+import java.io.File;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Report executor implementation.
+ *
+ * @todo should the report set be limitable by configuration?
+ * @plexus.component
+ */
+public class DefaultReportExecutor
+    extends AbstractLogEnabled
+    implements ReportExecutor
+{
+    /**
+     * Used to build POMs for artifacts so reports can inspect the project model.
+     *
+     * @plexus.requirement
+     */
+    private MavenProjectBuilder projectBuilder;
+
+    /**
+     * Persists and restores report databases per repository.
+     *
+     * @plexus.requirement
+     */
+    private ReportingStore reportingStore;
+
+    /**
+     * Creates project (POM) artifacts for discovered artifacts.
+     *
+     * @plexus.requirement
+     */
+    private ArtifactFactory artifactFactory;
+
+    /**
+     * Artifact discoverers keyed by repository layout hint ("default"/"legacy").
+     *
+     * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
+     */
+    private Map artifactDiscoverers;
+
+    /**
+     * Metadata discoverers keyed by repository layout hint ("default"/"legacy").
+     *
+     * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
+     */
+    private Map metadataDiscoverers;
+
+    // Number of artifacts processed per batch before the project builder caches are flushed
+    // (see flushProjectBuilderCacheHack).
+    private static final int ARTIFACT_BUFFER_SIZE = 1000;
+
+    /**
+     * Run the report group over a list of repository metadata objects, resetting each
+     * record's previous results first, then persist the updated database.
+     */
+    public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
+        throws ReportingStoreException
+    {
+        ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
+
+        for ( Iterator i = metadata.iterator(); i.hasNext(); )
+        {
+            RepositoryMetadata repositoryMetadata = (RepositoryMetadata) i.next();
+
+            // Use the file's timestamp so up-to-date checks can skip unchanged metadata later.
+            File file =
+                new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
+            reporter.cleanMetadata( repositoryMetadata, file.lastModified() );
+
+            reportGroup.processMetadata( repositoryMetadata, repository, reporter );
+        }
+
+        reportingStore.storeReports( reporter, repository );
+    }
+
+    /**
+     * Run the report group over a list of artifacts, attempting to build each artifact's
+     * project model first (model problems are recorded as warnings, not failures), then
+     * persist the updated database.
+     */
+    public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
+        throws ReportingStoreException
+    {
+        ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
+
+        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+        {
+            Artifact artifact = (Artifact) i.next();
+
+            Model model = null;
+            try
+            {
+                Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
+                                                                              artifact.getArtifactId(),
+                                                                              artifact.getVersion() );
+                MavenProject project =
+                    projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
+
+                model = project.getModel();
+            }
+            catch ( InvalidArtifactRTException e )
+            {
+                reporter.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
+            }
+            catch ( ProjectBuildingException e )
+            {
+                reporter.addWarning( artifact, null, null, "Error reading project model: " + e );
+            }
+
+            // Drop any stale results for this artifact before re-processing it.
+            reporter.removeArtifact( artifact );
+
+            reportGroup.processArtifact( artifact, model, reporter );
+        }
+
+        reportingStore.storeReports( reporter, repository );
+    }
+
+    /**
+     * Load the previously stored report database for the repository/report group.
+     */
+    public ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
+        throws ReportingStoreException
+    {
+        getLogger().debug(
+            "Reading previous report database " + reportGroup.getName() + " from repository " + repository.getId() );
+        return reportingStore.getReportsFromStore( repository, reportGroup );
+    }
+
+    /**
+     * Discover artifacts and metadata in the repository and run the full report group over
+     * them, clearing any previous results first. Artifacts are processed in batches to keep
+     * project-builder memory usage bounded.
+     */
+    public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
+                            ArtifactFilter filter )
+        throws DiscovererException, ReportingStoreException
+    {
+        // Flush (as in toilet, not store) the report database
+        ReportingDatabase database = getReportDatabase( repository, reportGroup );
+        database.clear();
+
+        // Discovery process
+        String layoutProperty = getRepositoryLayout( repository.getLayout() );
+        ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
+
+        // Save some memory by not tracking paths we won't use
+        // TODO: Plexus CDC should be able to inject this configuration
+        discoverer.setTrackOmittedPaths( false );
+
+        List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
+
+        if ( !artifacts.isEmpty() )
+        {
+            getLogger().info( "Discovered " + artifacts.size() + " artifacts" );
+
+            // Work through these in batches, then flush the project cache.
+            for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
+            {
+                int end = j + ARTIFACT_BUFFER_SIZE;
+                List currentArtifacts = artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
+
+                // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
+
+                // run the reports.
+                runArtifactReports( reportGroup, currentArtifacts, repository );
+
+                // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
+                // around that. TODO: remove when it is configurable
+                flushProjectBuilderCacheHack();
+            }
+        }
+
+        MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
+        List metadata =
+            metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
+
+        if ( !metadata.isEmpty() )
+        {
+            getLogger().info( "Discovered " + metadata.size() + " metadata files" );
+
+            // run the reports
+            runMetadataReports( reportGroup, metadata, repository );
+        }
+    }
+
+    /**
+     * Map a repository layout instance back to its Plexus hint so the matching discoverer
+     * can be looked up.
+     *
+     * @throws IllegalArgumentException if the layout class is neither default nor legacy
+     */
+    private String getRepositoryLayout( ArtifactRepositoryLayout layout )
+    {
+        // gross limitation that there is no reverse lookup of the hint for the layout.
+        if ( layout.getClass().equals( DefaultRepositoryLayout.class ) )
+        {
+            return "default";
+        }
+        else if ( layout.getClass().equals( LegacyRepositoryLayout.class ) )
+        {
+            return "legacy";
+        }
+        else
+        {
+            throw new IllegalArgumentException( "Unknown layout: " + layout );
+        }
+    }
+
+    /**
+     * Empty the project builder's private caches via reflection (see MNG-142) to bound
+     * memory use between artifact batches.
+     *
+     * NOTE(review): relies on the builder implementation having 'rawProjectCache' and
+     * 'processedProjectCache' fields - breaks (wrapped as RuntimeException) if the
+     * implementation changes.
+     */
+    private void flushProjectBuilderCacheHack()
+    {
+        try
+        {
+            if ( projectBuilder != null )
+            {
+                java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
+                f.setAccessible( true );
+                Map cache = (Map) f.get( projectBuilder );
+                cache.clear();
+
+                f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
+                f.setAccessible( true );
+                cache = (Map) f.get( projectBuilder );
+                cache.clear();
+            }
+        }
+        catch ( NoSuchFieldException e )
+        {
+            throw new RuntimeException( e );
+        }
+        catch ( IllegalAccessException e )
+        {
+            throw new RuntimeException( e );
+        }
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java
new file mode 100644 (file)
index 0000000..fb99ad7
--- /dev/null
@@ -0,0 +1,85 @@
+package org.apache.maven.archiva.reporting.executor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.store.ReportingStoreException;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
+
+import java.util.List;
+
+/**
+ * Executes a report or report group.
+ */
+public interface ReportExecutor
+{
+    /**
+     * Plexus component role name.
+     */
+    String ROLE = ReportExecutor.class.getName();
+
+    /**
+     * Run reports on a set of metadata.
+     *
+     * @param reportGroup the report set to run
+     * @param metadata    the RepositoryMetadata objects to report on
+     * @param repository  the repository that they come from
+     * @throws org.apache.maven.archiva.reporting.store.ReportingStoreException if there is a problem reading/writing the report database
+     */
+    public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
+        throws ReportingStoreException;
+
+    /**
+     * Run reports on a set of artifacts.
+     *
+     * @param reportGroup the report set to run
+     * @param artifacts   the Artifact objects to report on
+     * @param repository  the repository that they come from
+     * @throws ReportingStoreException if there is a problem reading/writing the report database
+     */
+    public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
+        throws ReportingStoreException;
+
+    /**
+     * Get the report database in use for a given repository.
+     *
+     * @param repository  the repository
+     * @param reportGroup the report set to run
+     * @return the report database
+     * @throws ReportingStoreException if there is a problem reading the report database
+     */
+    ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
+        throws ReportingStoreException;
+
+    /**
+     * Run the artifact and metadata reports for the repository. The artifacts and metadata will be discovered.
+     *
+     * @param repository          the repository to run from
+     * @param blacklistedPatterns the patterns to exclude during discovery
+     * @param filter              the filter to use during discovery to get a consistent list of artifacts
+     * @param reportGroup         the report set to run
+     * @throws ReportingStoreException if there is a problem reading/writing the report database
+     * @throws org.apache.maven.archiva.discoverer.DiscovererException
+     *                                 if there is a problem finding the artifacts and metadata to report on
+     */
+    public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
+                            ArtifactFilter filter )
+        throws DiscovererException, ReportingStoreException;
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java
new file mode 100644 (file)
index 0000000..6a7b89f
--- /dev/null
@@ -0,0 +1,40 @@
+package org.apache.maven.archiva.reporting.filter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+/**
+ * Implementation of a reporting filter. Artifacts already in the database are ignored.
+ */
+public class ReportingMetadataFilter
+    implements MetadataFilter
+{
+    // Database consulted to decide whether a metadata file was already processed.
+    private final ReportingDatabase reporter;
+
+    /**
+     * @param reporter the reporting database that records which metadata has been processed
+     */
+    public ReportingMetadataFilter( ReportingDatabase reporter )
+    {
+        this.reporter = reporter;
+    }
+
+    /**
+     * Include the metadata only if the database's record for it is older than the given timestamp.
+     *
+     * @param metadata  the metadata being considered for processing
+     * @param timestamp the file's last-modified time
+     * @return true if the metadata should be (re)processed
+     */
+    public boolean include( RepositoryMetadata metadata, long timestamp )
+    {
+        return !reporter.isMetadataUpToDate( metadata, timestamp );
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java
new file mode 100644 (file)
index 0000000..25139b9
--- /dev/null
@@ -0,0 +1,82 @@
+package org.apache.maven.archiva.reporting.group;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.model.Model;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * Basic functionality for all report groups.
+ */
+public abstract class AbstractReportGroup
+    implements ReportGroup
+{
+    /**
+     * @plexus.requirement role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor"
+     */
+    private Map artifactReports;
+
+    /**
+     * @plexus.requirement role="org.apache.maven.archiva.reporting.processor.MetadataReportProcessor"
+     */
+    private Map metadataReports;
+
+    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase )
+    {
+        for ( Iterator i = artifactReports.entrySet().iterator(); i.hasNext(); )
+        {
+            Map.Entry entry = (Map.Entry) i.next();
+
+            if ( includeReport( (String) entry.getKey() ) )
+            {
+                ArtifactReportProcessor report = (ArtifactReportProcessor) entry.getValue();
+
+                report.processArtifact( artifact, model, reportingDatabase );
+            }
+        }
+    }
+
+    public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
+                                 ReportingDatabase reportingDatabase )
+    {
+        for ( Iterator i = metadataReports.entrySet().iterator(); i.hasNext(); )
+        {
+            Map.Entry entry = (Map.Entry) i.next();
+
+            if ( includeReport( (String) entry.getKey() ) )
+            {
+                MetadataReportProcessor report = (MetadataReportProcessor) entry.getValue();
+
+                report.processMetadata( repositoryMetadata, repository, reportingDatabase );
+            }
+        }
+    }
+
+    public String toString()
+    {
+        return getName();
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java
new file mode 100644 (file)
index 0000000..1a0d9ed
--- /dev/null
@@ -0,0 +1,69 @@
+package org.apache.maven.archiva.reporting.group;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.group.AbstractReportGroup;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * The default report set, for repository health.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" role-hint="health"
+ * @todo could these report groups be assembled dynamically by configuration rather than as explicit components? eg, reportGroup.addReport( ARP ), reportGroup.addReport( MRP )
+ */
+public class DefaultReportGroup
+    extends AbstractReportGroup
+{
+    /**
+     * Role hints of the reports to include in this set.
+     */
+    private static final Map reports = new LinkedHashMap();
+
+    static
+    {
+        reports.put( "checksum", "Checksum Problems" );
+        reports.put( "dependency", "Dependency Problems" );
+        // TODO re-enable duplicate, once a way to populate the index is determined!
+//        reports.put( "duplicate", "Duplicate Artifact Problems" );
+        reports.put( "invalid-pom", "POM Problems" );
+        reports.put( "bad-metadata", "Metadata Problems" );
+        reports.put( "checksum-metadata", "Metadata Checksum Problems" );
+        reports.put( "artifact-location", "Artifact Location Problems" );
+    }
+
+    public boolean includeReport( String key )
+    {
+        return reports.containsKey( key );
+    }
+
+    public Map getReports()
+    {
+        return reports;
+    }
+
+    public String getName()
+    {
+        return "Repository Health";
+    }
+
+    public String getFilename()
+    {
+        return "health-report.xml";
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/OldArtifactReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/OldArtifactReportGroup.java
new file mode 100644 (file)
index 0000000..0a6fae9
--- /dev/null
@@ -0,0 +1,64 @@
+package org.apache.maven.archiva.reporting.group;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.group.AbstractReportGroup;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * The report set for finding old artifacts (both snapshot and release)
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" role-hint="old-artifact"
+ */
+public class OldArtifactReportGroup
+    extends AbstractReportGroup
+{
+    /**
+     * Role hints of the reports to include in this set.
+     *
+     * @todo implement these report processors!
+     */
+    private static final Map reports = new LinkedHashMap();
+
+    static
+    {
+        reports.put( "old-artifact", "Old Artifacts" );
+        reports.put( "old-snapshot-artifact", "Old Snapshot Artifacts" );
+    }
+
+    public boolean includeReport( String key )
+    {
+        return reports.containsKey( key );
+    }
+
+    public Map getReports()
+    {
+        return reports;
+    }
+
+    public String getFilename()
+    {
+        return "old-artifacts-report.xml";
+    }
+
+    public String getName()
+    {
+        return "Old Artifacts";
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java
new file mode 100644 (file)
index 0000000..8cb408d
--- /dev/null
@@ -0,0 +1,86 @@
+package org.apache.maven.archiva.reporting.group;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.model.Model;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+
+import java.util.Map;
+
+/**
+ * A grouping or report processors for execution as a visible report from the web interface - eg, "health",
+ * "old artifacts", etc.
+ */
+public interface ReportGroup
+{
+    /**
+     * Plexus component role.
+     */
+    String ROLE = ReportGroup.class.getName();
+
+    /**
+     * Run any artifact related reports in the report set.
+     *
+     * @param artifact          the artifact to process
+     * @param model             the POM associated with the artifact to process
+     * @param reportingDatabase the report database to store results in
+     */
+    void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase );
+
+    /**
+     * Run any metadata related reports in the report set.
+     *
+     * @param repositoryMetadata the metadata to process
+     * @param repository         the repository the metadata is located in
+     * @param reportingDatabase  the report database to store results in
+     */
+    void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
+                          ReportingDatabase reportingDatabase );
+
+    /**
+     * Whether a report with the given role hint is included in this report set.
+     *
+     * @param key the report role hint.
+     * @return whether the report is included
+     */
+    boolean includeReport( String key );
+
+    /**
+     * Get the report processors in this set. The map is keyed by the report's role hint, and the value is its
+     * display name.
+     *
+     * @return the reports
+     */
+    Map getReports();
+
+    /**
+     * Get the user-friendly name of this report.
+     *
+     * @return the report name
+     */
+    String getName();
+
+    /**
+     * Get the filename of the reports within the repository's reports directory.
+     *
+     * @return the filename
+     */
+    String getFilename();
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java
new file mode 100644 (file)
index 0000000..8413cb7
--- /dev/null
@@ -0,0 +1,32 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.model.Model;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+
+/**
+ * This interface will be called by the main system for each artifact as it is discovered. This is how each of the
+ * different types of reports are implemented.
+ */
+public interface ArtifactReportProcessor
+{
+    /**
+     * Plexus component role name.
+     */
+    String ROLE = ArtifactReportProcessor.class.getName();
+
+    /**
+     * Examine a discovered artifact and record any failures or warnings in the reporting database.
+     *
+     * @param artifact the artifact to process
+     * @param model    the POM associated with the artifact
+     * @param reporter the reporting database to receive processing results
+     */
+    void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter );
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java
new file mode 100644 (file)
index 0000000..7aae729
--- /dev/null
@@ -0,0 +1,346 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.layer.RepositoryQueryLayer;
+import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.Plugin;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+import org.codehaus.plexus.util.FileUtils;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This class will report on bad metadata files.  These include invalid version declarations and incomplete version
+ * information inside the metadata file.  Plugin metadata will be checked for validity of the latest plugin artifacts.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.MetadataReportProcessor" role-hint="bad-metadata"
+ */
+public class BadMetadataReportProcessor
+    implements MetadataReportProcessor
+{
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactFactory artifactFactory;
+
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
+
+    private static final String ROLE_HINT = "bad-metadata";
+
+    /**
+     * Process the metadata encountered in the repository and report all errors found, if any.
+     *
+     * @param metadata   the metadata to be processed.
+     * @param repository the repository where the metadata was encountered
+     * @param reporter   the ReportingDatabase to receive processing results
+     */
+    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+                                 ReportingDatabase reporter )
+    {
+        if ( metadata.storedInGroupDirectory() )
+        {
+            try
+            {
+                checkPluginMetadata( metadata, repository, reporter );
+            }
+            catch ( IOException e )
+            {
+                addWarning( reporter, metadata, null, "Error getting plugin artifact directories versions: " + e );
+            }
+        }
+        else
+        {
+            Versioning versioning = metadata.getMetadata().getVersioning();
+            boolean found = false;
+            if ( versioning != null )
+            {
+                String lastUpdated = versioning.getLastUpdated();
+                if ( lastUpdated != null && lastUpdated.length() != 0 )
+                {
+                    found = true;
+                }
+            }
+            if ( !found )
+            {
+                addFailure( reporter, metadata, "missing-last-updated",
+                            "Missing lastUpdated element inside the metadata." );
+            }
+
+            if ( metadata.storedInArtifactVersionDirectory() )
+            {
+                checkSnapshotMetadata( metadata, repository, reporter );
+            }
+            else
+            {
+                checkMetadataVersions( metadata, repository, reporter );
+
+                try
+                {
+                    checkRepositoryVersions( metadata, repository, reporter );
+                }
+                catch ( IOException e )
+                {
+                    String reason = "Error getting plugin artifact directories versions: " + e;
+                    addWarning( reporter, metadata, null, reason );
+                }
+            }
+        }
+    }
+
+    private static void addWarning( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
+                                    String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addWarning( metadata, ROLE_HINT, problem, reason );
+    }
+
+    /**
+     * Method for processing a GroupRepositoryMetadata
+     *
+     * @param metadata   the metadata to be processed.
+     * @param repository the repository where the metadata was encountered
+     * @param reporter   the ReportingDatabase to receive processing results
+     */
+    private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+                                      ReportingDatabase reporter )
+        throws IOException
+    {
+        File metadataDir =
+            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+        List pluginDirs = getArtifactIdFiles( metadataDir );
+
+        Map prefixes = new HashMap();
+        for ( Iterator plugins = metadata.getMetadata().getPlugins().iterator(); plugins.hasNext(); )
+        {
+            Plugin plugin = (Plugin) plugins.next();
+
+            String artifactId = plugin.getArtifactId();
+            if ( artifactId == null || artifactId.length() == 0 )
+            {
+                addFailure( reporter, metadata, "missing-artifact-id:" + plugin.getPrefix(),
+                            "Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
+            }
+
+            String prefix = plugin.getPrefix();
+            if ( prefix == null || prefix.length() == 0 )
+            {
+                addFailure( reporter, metadata, "missing-plugin-prefix:" + artifactId,
+                            "Missing or empty plugin prefix for artifactId " + artifactId + "." );
+            }
+            else
+            {
+                if ( prefixes.containsKey( prefix ) )
+                {
+                    addFailure( reporter, metadata, "duplicate-plugin-prefix:" + prefix,
+                                "Duplicate plugin prefix found: " + prefix + "." );
+                }
+                else
+                {
+                    prefixes.put( prefix, plugin );
+                }
+            }
+
+            if ( artifactId != null && artifactId.length() > 0 )
+            {
+                File pluginDir = new File( metadataDir, artifactId );
+                if ( !pluginDirs.contains( pluginDir ) )
+                {
+                    addFailure( reporter, metadata, "missing-plugin-from-repository:" + artifactId,
+                                "Metadata plugin " + artifactId + " not found in the repository" );
+                }
+                else
+                {
+                    pluginDirs.remove( pluginDir );
+                }
+            }
+        }
+
+        if ( pluginDirs.size() > 0 )
+        {
+            for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
+            {
+                File plugin = (File) plugins.next();
+                addFailure( reporter, metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " +
+                    plugin.getName() + " is present in the repository but " + "missing in the metadata." );
+            }
+        }
+    }
+
+    /**
+     * Method for processing a SnapshotArtifactRepository
+     *
+     * @param metadata   the metadata to be processed.
+     * @param repository the repository where the metadata was encountered
+     * @param reporter   the ReportingDatabase to receive processing results
+     */
+    private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+                                        ReportingDatabase reporter )
+    {
+        RepositoryQueryLayer repositoryQueryLayer =
+            repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+
+        Versioning versioning = metadata.getMetadata().getVersioning();
+        if ( versioning != null )
+        {
+            Snapshot snapshot = versioning.getSnapshot();
+
+            String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION,
+                                                  snapshot.getTimestamp() + "-" + snapshot.getBuildNumber() );
+            Artifact artifact =
+                artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+            artifact.isSnapshot(); // trigger baseVersion correction
+
+            if ( !repositoryQueryLayer.containsArtifact( artifact ) )
+            {
+                addFailure( reporter, metadata, "missing-snapshot-artifact-from-repository:" + version,
+                            "Snapshot artifact " + version + " does not exist." );
+            }
+        }
+    }
+
+    /**
+     * Method for validating the versions declared inside an ArtifactRepositoryMetadata
+     *
+     * @param metadata   the metadata to be processed.
+     * @param repository the repository where the metadata was encountered
+     * @param reporter   the ReportingDatabase to receive processing results
+     */
+    private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
+                                        ReportingDatabase reporter )
+    {
+        RepositoryQueryLayer repositoryQueryLayer =
+            repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+
+        Versioning versioning = metadata.getMetadata().getVersioning();
+        if ( versioning != null )
+        {
+            for ( Iterator versions = versioning.getVersions().iterator(); versions.hasNext(); )
+            {
+                String version = (String) versions.next();
+
+                Artifact artifact =
+                    artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+
+                if ( !repositoryQueryLayer.containsArtifact( artifact ) )
+                {
+                    addFailure( reporter, metadata, "missing-artifact-from-repository:" + version, "Artifact version " +
+                        version + " is present in metadata but " + "missing in the repository." );
+                }
+            }
+        }
+    }
+
+    /**
+     * Searches the artifact repository directory for all versions and verifies that all of them are listed in the
+     * ArtifactRepositoryMetadata
+     *
+     * @param metadata   the metadata to be processed.
+     * @param repository the repository where the metadata was encountered
+     * @param reporter   the ReportingDatabase to receive processing results
+     * @throws java.io.IOException if there is a problem reading from the file system
+     */
+    private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
+                                          ReportingDatabase reporter )
+        throws IOException
+    {
+        Versioning versioning = metadata.getMetadata().getVersioning();
+        List metadataVersions = versioning != null ? versioning.getVersions() : Collections.EMPTY_LIST;
+        File versionsDir =
+            new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+
+        // TODO: I don't know how this condition can happen, but it was seen on the main repository.
+        // Avoid hard failure
+        if ( versionsDir.exists() )
+        {
+            List versions = FileUtils.getFileNames( versionsDir, "*/*.pom", null, false );
+            for ( Iterator i = versions.iterator(); i.hasNext(); )
+            {
+                File path = new File( (String) i.next() );
+                String version = path.getParentFile().getName();
+                if ( !metadataVersions.contains( version ) )
+                {
+                    addFailure( reporter, metadata, "missing-artifact-from-metadata:" + version, "Artifact version " +
+                        version + " found in the repository but " + "missing in the metadata." );
+                }
+            }
+        }
+        else
+        {
+            addFailure( reporter, metadata, null, "Metadata's directory did not exist: " + versionsDir );
+        }
+    }
+
+    /**
+     * Used to gather artifactIds from a groupId directory.
+     *
+     * @param groupIdDir the directory of the group
+     * @return the list of artifact ID File objects for each directory
+     * @throws IOException if there was a failure to read the directories
+     */
+    private List getArtifactIdFiles( File groupIdDir )
+        throws IOException
+    {
+        List artifactIdFiles = new ArrayList();
+
+        File[] files = groupIdDir.listFiles();
+        if ( files != null )
+        {
+            for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
+            {
+                File artifactDir = (File) i.next();
+
+                if ( artifactDir.isDirectory() )
+                {
+                    List versions = FileUtils.getFileNames( artifactDir, "*/*.pom", null, false );
+                    if ( versions.size() > 0 )
+                    {
+                        artifactIdFiles.add( artifactDir );
+                    }
+                }
+            }
+        }
+
+        return artifactIdFiles;
+    }
+
+    private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
+                                    String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java
new file mode 100644 (file)
index 0000000..5dc3e26
--- /dev/null
@@ -0,0 +1,103 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.codehaus.plexus.digest.Digester;
+import org.codehaus.plexus.digest.DigesterException;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * This class reports invalid and mismatched checksums of artifacts and metadata files.
+ * It validates MD5 and SHA-1 checksums.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor" role-hint="checksum"
+ */
+public class ChecksumArtifactReportProcessor
+    implements ArtifactReportProcessor
+{
+    /**
+     * @plexus.requirement role-hint="sha1"
+     */
+    private Digester sha1Digester;
+
+    /**
+     * @plexus.requirement role-hint="md5"
+     */
+    private Digester md5Digester;
+
+    private static final String ROLE_HINT = "checksum";
+
+    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    {
+        ArtifactRepository repository = artifact.getRepository();
+
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+            throw new UnsupportedOperationException(
+                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+        }
+
+        //check if checksum files exist
+        String path = repository.pathOf( artifact );
+        File file = new File( repository.getBasedir(), path );
+
+        // TODO: make md5 configurable
+//        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
+        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
+    }
+
+    private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
+                                 ReportingDatabase reporter, Artifact artifact )
+    {
+        File checksumFile = new File( repository.getBasedir(), path );
+        if ( checksumFile.exists() )
+        {
+            try
+            {
+                digester.verify( file, FileUtils.fileRead( checksumFile ) );
+            }
+            catch ( DigesterException e )
+            {
+                addFailure( reporter, artifact, "checksum-wrong", e.getMessage() );
+            }
+            catch ( IOException e )
+            {
+                addFailure( reporter, artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
+            }
+        }
+        else
+        {
+            addFailure( reporter, artifact, "checksum-missing",
+                        digester.getAlgorithm() + " checksum file does not exist." );
+        }
+    }
+
+    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java
new file mode 100644 (file)
index 0000000..81a558e
--- /dev/null
@@ -0,0 +1,106 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.codehaus.plexus.digest.Digester;
+import org.codehaus.plexus.digest.DigesterException;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * This class reports invalid and mismatched checksums of artifacts and metadata files.
+ * It validates MD5 and SHA-1 checksums.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.MetadataReportProcessor" role-hint="checksum-metadata"
+ */
+public class ChecksumMetadataReportProcessor
+    implements MetadataReportProcessor
+{
+    /**
+     * @plexus.requirement role-hint="sha1"
+     */
+    private Digester sha1Digester;
+
+    /**
+     * @plexus.requirement role-hint="md5"
+     */
+    private Digester md5Digester;
+
+    private static final String ROLE_HINT = "checksum-metadata";
+
+    /**
+     * Validate the checksums of the metadata. Get the metadata file from the
+     * repository then validate the checksum.
+     */
+    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+                                 ReportingDatabase reporter )
+    {
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+            throw new UnsupportedOperationException(
+                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+        }
+
+        //check if checksum files exist
+        String path = repository.pathOfRemoteRepositoryMetadata( metadata );
+        File file = new File( repository.getBasedir(), path );
+
+        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
+        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );
+    }
+
+    private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
+                                 ReportingDatabase reporter, RepositoryMetadata metadata )
+    {
+        File checksumFile = new File( repository.getBasedir(), path );
+        if ( checksumFile.exists() )
+        {
+            try
+            {
+                digester.verify( file, FileUtils.fileRead( checksumFile ) );
+            }
+            catch ( DigesterException e )
+            {
+                addFailure( reporter, metadata, "checksum-wrong", e.getMessage() );
+            }
+            catch ( IOException e )
+            {
+                addFailure( reporter, metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
+            }
+        }
+        else
+        {
+            addFailure( reporter, metadata, "checksum-missing",
+                        digester.getAlgorithm() + " checksum file does not exist." );
+        }
+    }
+
+    private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
+                                    String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+    }
+
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java
new file mode 100644 (file)
index 0000000..2ed20aa
--- /dev/null
@@ -0,0 +1,160 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.layer.RepositoryQueryLayer;
+import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.model.Dependency;
+import org.apache.maven.model.Model;
+
+import java.text.MessageFormat;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor" role-hint="dependency"
+ */
+public class DependencyArtifactReportProcessor
+    implements ArtifactReportProcessor
+{
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactFactory artifactFactory;
+
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryQueryLayerFactory layerFactory;
+
+    private static final String POM = "pom";
+
+    private static final String ROLE_HINT = "dependency";
+
+    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    {
+        RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( artifact.getRepository() );
+        if ( !queryLayer.containsArtifact( artifact ) )
+        {
+            // TODO: is this even possible?
+            addFailure( reporter, artifact, "missing-artifact", "Artifact does not exist in the repository" );
+        }
+
+        if ( model != null && POM.equals( artifact.getType() ) )
+        {
+            List dependencies = model.getDependencies();
+            processDependencies( dependencies, reporter, queryLayer, artifact );
+        }
+    }
+
+    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+    }
+
+    private void processDependencies( List dependencies, ReportingDatabase reporter,
+                                      RepositoryQueryLayer repositoryQueryLayer, Artifact sourceArtifact )
+    {
+        if ( dependencies.size() > 0 )
+        {
+            Iterator iterator = dependencies.iterator();
+            while ( iterator.hasNext() )
+            {
+                Dependency dependency = (Dependency) iterator.next();
+
+                try
+                {
+                    Artifact artifact = createArtifact( dependency );
+
+                    // TODO: handle ranges properly. We should instead be mapping out all the artifacts in the
+                    // repository and mapping out the graph
+
+                    if ( artifact.getVersion() == null )
+                    {
+                        // it was a range, for now presume it exists
+                        continue;
+                    }
+
+                    if ( !repositoryQueryLayer.containsArtifact( artifact ) )
+                    {
+                        String reason = MessageFormat.format(
+                            "Artifact''s dependency {0} does not exist in the repository",
+                            new String[]{getDependencyString( dependency )} );
+                        addFailure( reporter, sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ),
+                                    reason );
+                    }
+                }
+                catch ( InvalidVersionSpecificationException e )
+                {
+                    String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}",
+                                                          new String[]{getDependencyString( dependency ),
+                                                              dependency.getVersion()} );
+                    addFailure( reporter, sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
+                }
+            }
+        }
+    }
+
+    private String getDependencyKey( Dependency dependency )
+    {
+        String str = dependency.getGroupId();
+        str += ":" + dependency.getArtifactId();
+        str += ":" + dependency.getVersion();
+        str += ":" + dependency.getType();
+        if ( dependency.getClassifier() != null )
+        {
+            str += ":" + dependency.getClassifier();
+        }
+        return str;
+    }
+
+    static String getDependencyString( Dependency dependency )
+    {
+        String str = "(group=" + dependency.getGroupId();
+        str += ", artifact=" + dependency.getArtifactId();
+        str += ", version=" + dependency.getVersion();
+        str += ", type=" + dependency.getType();
+        if ( dependency.getClassifier() != null )
+        {
+            str += ", classifier=" + dependency.getClassifier();
+        }
+        str += ")";
+        return str;
+    }
+
+    private Artifact createArtifact( Dependency dependency )
+        throws InvalidVersionSpecificationException
+    {
+        VersionRange spec = VersionRange.createFromVersionSpec( dependency.getVersion() );
+
+        if ( spec == null )
+        {
+            throw new InvalidVersionSpecificationException( "Dependency version was null" );
+        }
+
+        return artifactFactory.createDependencyArtifact( dependency.getGroupId(), dependency.getArtifactId(), spec,
+                                                         dependency.getType(), dependency.getClassifier(),
+                                                         dependency.getScope() );
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java
new file mode 100644 (file)
index 0000000..a5f8846
--- /dev/null
@@ -0,0 +1,137 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
+import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
+import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.codehaus.plexus.digest.Digester;
+import org.codehaus.plexus.digest.DigesterException;
+
+import java.io.File;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Validates an artifact file for duplicates within the same groupId based from what's available in a repository index.
+ *
+ * @author Edwin Punzalan
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor" role-hint="duplicate"
+ */
+public class DuplicateArtifactFileReportProcessor
+    implements ArtifactReportProcessor
+{
+    /**
+     * @plexus.requirement role-hint="md5"
+     */
+    private Digester digester;
+
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryArtifactIndexFactory indexFactory;
+
+    /**
+     * @plexus.configuration
+     */
+    private String indexDirectory;
+
+    private static final String ROLE_HINT = "duplicate";
+
+    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    {
+        ArtifactRepository repository = artifact.getRepository();
+        if ( artifact.getFile() != null )
+        {
+            System.out.println( "indexDirectory = " + indexDirectory );
+            
+            File indexDirectoryFile = new File( indexDirectory );
+
+            RepositoryArtifactIndex index = indexFactory.createStandardIndex( indexDirectoryFile );
+
+            String checksum = null;
+            try
+            {
+                checksum = digester.calc( artifact.getFile() );
+            }
+            catch ( DigesterException e )
+            {
+                addWarning( reporter, artifact, null,
+                            "Unable to generate checksum for " + artifact.getFile() + ": " + e );
+            }
+
+            if ( checksum != null )
+            {
+                try
+                {
+                    List results = index.search( new LuceneQuery(
+                        new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
+
+                    if ( !results.isEmpty() )
+                    {
+                        for ( Iterator i = results.iterator(); i.hasNext(); )
+                        {
+                            StandardArtifactIndexRecord result = (StandardArtifactIndexRecord) i.next();
+
+                            //make sure it is not the same artifact
+                            if ( !result.getFilename().equals( repository.pathOf( artifact ) ) )
+                            {
+                                //report only duplicates from the same groupId
+                                String groupId = artifact.getGroupId();
+                                if ( groupId.equals( result.getGroupId() ) )
+                                {
+                                    addFailure( reporter, artifact, "duplicate",
+                                                 "Found duplicate for " + artifact.getId() );
+                                }
+                            }
+                        }
+                    }
+                }
+                catch ( RepositoryIndexSearchException e )
+                {
+                    addWarning( reporter, artifact, null, "Failed to search in index" + e );
+                }
+            }
+        }
+        else
+        {
+            addWarning( reporter, artifact, null, "Artifact file is null" );
+        }
+    }
+
+    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+    }
+
+    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addWarning( artifact, ROLE_HINT, problem, reason );
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java
new file mode 100644 (file)
index 0000000..19107c2
--- /dev/null
@@ -0,0 +1,101 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
+
+/**
+ * This class validates well-formedness of pom xml file.
+ *
+ * @todo nice to have this a specific, tested report - however it is likely to double up with project building exceptions from IndexerTask. Resolve [!]
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor" role-hint="invalid-pom"
+ */
+public class InvalidPomArtifactReportProcessor
+    implements ArtifactReportProcessor
+{
+    private static final String ROLE_HINT = "invalid-pom";
+
+    /**
+     * @param artifact The pom xml file to be validated, passed as an artifact object.
+     * @param reporter The artifact reporter object.
+     */
+    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    {
+        ArtifactRepository repository = artifact.getRepository();
+
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+            throw new UnsupportedOperationException(
+                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+        }
+
+        if ( "pom".equals( artifact.getType().toLowerCase() ) )
+        {
+            File f = new File( repository.getBasedir(), repository.pathOf( artifact ) );
+
+            if ( !f.exists() )
+            {
+                addFailure( reporter, artifact, "pom-missing", "POM not found." );
+            }
+            else
+            {
+                Reader reader = null;
+
+                MavenXpp3Reader pomReader = new MavenXpp3Reader();
+
+                try
+                {
+                    reader = new FileReader( f );
+                    pomReader.read( reader );
+                }
+                catch ( XmlPullParserException e )
+                {
+                    addFailure( reporter, artifact, "pom-parse-exception",
+                                "The pom xml file is not well-formed. Error while parsing: " + e.getMessage() );
+                }
+                catch ( IOException e )
+                {
+                    addFailure( reporter, artifact, "pom-io-exception",
+                                "Error while reading the pom xml file: " + e.getMessage() );
+                }
+                finally
+                {
+                    IOUtil.close( reader );
+                }
+            }
+        }
+    }
+
+    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java
new file mode 100644 (file)
index 0000000..c297e1c
--- /dev/null
@@ -0,0 +1,246 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.handler.DefaultArtifactHandler;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+
+/**
+ * Validate the location of the artifact based on the values indicated
+ * in its pom (both the pom packaged with the artifact & the pom in the
+ * file system).
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor" role-hint="artifact-location"
+ */
+public class LocationArtifactReportProcessor
+    implements ArtifactReportProcessor
+{
+    /**
+     * @plexus.requirement
+     */
+    private ArtifactFactory artifactFactory;
+
+    // TODO: share with other code with the same
+    private static final Set JAR_FILE_TYPES =
+        new HashSet( Arrays.asList( new String[]{"jar", "war", "par", "ejb", "ear", "rar", "sar"} ) );
+
+    /**
+     * @plexus.requirement
+     */
+    private MavenProjectBuilder projectBuilder;
+
+    private static final String POM = "pom";
+
+    private static final String ROLE_HINT = "artifact-location";
+
+    /**
+     * Check whether the artifact is in its proper location. The location of the artifact
+     * is validated first against the groupId, artifactId and versionId in the specified model
+     * object (pom in the file system). Then unpack the artifact (jar file) and get the model (pom)
+     * included in the package. If a model exists inside the package, then check if the artifact's
+     * location is valid based on the location specified in the pom. Check if the both the location
+     * specified in the file system pom and in the pom included in the package is the same.
+     */
+    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    {
+        ArtifactRepository repository = artifact.getRepository();
+
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+            throw new UnsupportedOperationException(
+                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+        }
+
+        adjustDistributionArtifactHandler( artifact );
+
+        String artifactPath = repository.pathOf( artifact );
+
+        if ( model != null )
+        {
+            // only check if it is a standalone POM, or an artifact other than a POM
+            // ie, don't check the location of the POM for another artifact matches that of the artifact
+            if ( !POM.equals( artifact.getType() ) || POM.equals( model.getPackaging() ) )
+            {
+                //check if the artifact is located in its proper location based on the info
+                //specified in the model object/pom
+                Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(),
+                                                                                       model.getArtifactId(),
+                                                                                       model.getVersion(),
+                                                                                       artifact.getType(),
+                                                                                       artifact.getClassifier() );
+
+                adjustDistributionArtifactHandler( modelArtifact );
+                String modelPath = repository.pathOf( modelArtifact );
+                if ( !modelPath.equals( artifactPath ) )
+                {
+                    addFailure( reporter, artifact, "repository-pom-location",
+                                "The artifact is out of place. It does not match the specified location in the repository pom: " +
+                                    modelPath );
+                }
+            }
+        }
+
+        // get the location of the artifact itself
+        File file = new File( repository.getBasedir(), artifactPath );
+
+        if ( file.exists() )
+        {
+            if ( JAR_FILE_TYPES.contains( artifact.getType() ) )
+            {
+                //unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
+                //check if the pom is included in the package
+                Model extractedModel = readArtifactModel( file, artifact, reporter );
+
+                if ( extractedModel != null )
+                {
+                    Artifact extractedArtifact = artifactFactory.createBuildArtifact( extractedModel.getGroupId(),
+                                                                                      extractedModel.getArtifactId(),
+                                                                                      extractedModel.getVersion(),
+                                                                                      extractedModel.getPackaging() );
+                    if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
+                    {
+                        addFailure( reporter, artifact, "packaged-pom-location",
+                                    "The artifact is out of place. It does not match the specified location in the packaged pom." );
+                    }
+                }
+            }
+        }
+        else
+        {
+            addFailure( reporter, artifact, "missing-artifact", "The artifact file [" + file + "] cannot be found for metadata." );
+        }
+    }
+
+    /**
+     * Record a failure for the given artifact against this processor's role hint.
+     *
+     * @param reporter the database to record the failure in
+     * @param artifact the artifact the problem relates to
+     * @param problem  the problem identifier (e.g. "repository-pom-location")
+     * @param reason   a human-readable description of the problem
+     */
+    private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+    }
+
+    /**
+     * Replace the artifact handler for "distribution-zip"/"distribution-tgz" types so that
+     * repository.pathOf() produces the correct file extension for them.
+     * NOTE(review): this helper is duplicated verbatim in several report processors —
+     * consider extracting it to a shared utility.
+     *
+     * @param artifact the artifact whose handler may be replaced (mutated in place)
+     */
+    private static void adjustDistributionArtifactHandler( Artifact artifact )
+    {
+        // need to tweak these as they aren't currently in the known type converters. TODO - add them in Maven
+        if ( "distribution-zip".equals( artifact.getType() ) )
+        {
+            artifact.setArtifactHandler( new DefaultArtifactHandler( "zip" ) );
+        }
+        else if ( "distribution-tgz".equals( artifact.getType() ) )
+        {
+            artifact.setArtifactHandler( new DefaultArtifactHandler( "tar.gz" ) );
+        }
+    }
+
+    /**
+     * Extract and parse the POM packaged inside an artifact's JAR file, if one is present
+     * at META-INF/maven/&lt;groupId&gt;/&lt;artifactId&gt;/pom.xml.
+     *
+     * @param file     the JAR file to inspect
+     * @param artifact the artifact the file belongs to, used to locate the POM entry
+     * @param reporter the database to record a warning in if the model cannot be read
+     * @return the parsed model with inherited groupId/version resolved from the parent,
+     *         or null if no POM entry exists or it could not be read
+     */
+    private Model readArtifactModel( File file, Artifact artifact, ReportingDatabase reporter )
+    {
+        Model model = null;
+
+        JarFile jar = null;
+        try
+        {
+            jar = new JarFile( file );
+
+            //Get the entry and its input stream.
+            JarEntry entry = jar.getJarEntry(
+                "META-INF/maven/" + artifact.getGroupId() + "/" + artifact.getArtifactId() + "/pom.xml" );
+
+            // If the entry is not null, extract it.
+            if ( entry != null )
+            {
+                model = readModel( jar.getInputStream( entry ) );
+
+                // A missing groupId/version is normally inherited from the parent POM.
+                // Guard against a malformed POM declaring neither the value nor a
+                // <parent>, which would otherwise throw a NullPointerException here.
+                if ( model.getGroupId() == null && model.getParent() != null )
+                {
+                    model.setGroupId( model.getParent().getGroupId() );
+                }
+                if ( model.getVersion() == null && model.getParent() != null )
+                {
+                    model.setVersion( model.getParent().getVersion() );
+                }
+            }
+        }
+        catch ( IOException e )
+        {
+            addWarning( reporter, artifact, "Unable to read artifact to extract model: " + e );
+        }
+        catch ( XmlPullParserException e )
+        {
+            addWarning( reporter, artifact, "Unable to parse extracted model: " + e );
+        }
+        finally
+        {
+            if ( jar != null )
+            {
+                //noinspection UnusedCatchParameter
+                try
+                {
+                    jar.close();
+                }
+                catch ( IOException e )
+                {
+                    // ignore - already returning the best model we could read
+                }
+            }
+        }
+        return model;
+    }
+
+    /**
+     * Record a warning (with no specific problem ID) for the given artifact against this
+     * processor's role hint.
+     *
+     * @param reporter the database to record the warning in
+     * @param artifact the artifact the warning relates to
+     * @param reason   a human-readable description of the warning
+     */
+    private static void addWarning( ReportingDatabase reporter, Artifact artifact, String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addWarning( artifact, ROLE_HINT, null, reason );
+    }
+
+    /**
+     * Parse a POM from the given stream using the Maven Xpp3 reader.
+     *
+     * @param entryStream the stream positioned at the start of the pom.xml; closed by this method
+     * @return the parsed model
+     * @throws IOException            if the stream cannot be read
+     * @throws XmlPullParserException if the POM is not well-formed
+     */
+    private Model readModel( InputStream entryStream )
+        throws IOException, XmlPullParserException
+    {
+        // POMs are XML, which defaults to UTF-8; relying on the platform default
+        // charset here would mis-read non-ASCII POM content on some platforms.
+        Reader isReader = new InputStreamReader( entryStream, "UTF-8" );
+
+        Model model;
+        try
+        {
+            MavenXpp3Reader pomReader = new MavenXpp3Reader();
+            model = pomReader.read( isReader );
+        }
+        finally
+        {
+            IOUtil.close( isReader );
+        }
+        return model;
+    }
+
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java
new file mode 100644 (file)
index 0000000..12f57e9
--- /dev/null
@@ -0,0 +1,31 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+
+/**
+ * This interface is called by the main system for each piece of metadata as it is discovered.
+ */
+public interface MetadataReportProcessor
+{
+    /** The Plexus role for the component. */
+    String ROLE = MetadataReportProcessor.class.getName();
+
+    /**
+     * Process a single piece of repository metadata, recording any results in the
+     * reporting database.
+     *
+     * @param metadata   the metadata to examine
+     * @param repository the repository the metadata was discovered in
+     * @param reporter   the database to record failures, warnings and notices in
+     */
+    void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ReportingDatabase reporter );
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java
new file mode 100644 (file)
index 0000000..8df0f70
--- /dev/null
@@ -0,0 +1,91 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.handler.DefaultArtifactHandler;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+
+import java.io.File;
+
+/**
+ * Find artifacts in the repository that are considered old.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor" role-hint="old-artifact"
+ * @todo make this configurable from the web interface
+ */
+public class OldArtifactReportProcessor
+    implements ArtifactReportProcessor
+{
+    private static final String ROLE_HINT = "old-artifact";
+
+    /**
+     * The maximum age of an artifact before it is reported old, specified in seconds. The default is 1 year.
+     *
+     * @plexus.configuration default-value="31536000"
+     */
+    private int maxAge;
+
+    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+    {
+        ArtifactRepository repository = artifact.getRepository();
+
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+            throw new UnsupportedOperationException(
+                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+        }
+
+        adjustDistributionArtifactHandler( artifact );
+
+        String artifactPath = repository.pathOf( artifact );
+
+        //get the location of the artifact itself
+        File file = new File( repository.getBasedir(), artifactPath );
+
+        if ( file.exists() )
+        {
+            if ( System.currentTimeMillis() - file.lastModified() > maxAge * 1000 )
+            {
+                // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+                reporter.addNotice( artifact, ROLE_HINT, "old-artifact",
+                                    "The artifact is older than the maximum age of " + maxAge + " seconds." );
+            }
+        }
+        else
+        {
+            throw new IllegalStateException( "Couldn't find artifact " + file );
+        }
+    }
+
+    private static void adjustDistributionArtifactHandler( Artifact artifact )
+    {
+        // need to tweak these as they aren't currently in the known type converters. TODO - add them in Maven
+        if ( "distribution-zip".equals( artifact.getType() ) )
+        {
+            artifact.setArtifactHandler( new DefaultArtifactHandler( "zip" ) );
+        }
+        else if ( "distribution-tgz".equals( artifact.getType() ) )
+        {
+            artifact.setArtifactHandler( new DefaultArtifactHandler( "tar.gz" ) );
+        }
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java
new file mode 100644 (file)
index 0000000..b067e6c
--- /dev/null
@@ -0,0 +1,181 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.handler.DefaultArtifactHandler;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.regex.Matcher;
+
+/**
+ * Find snapshot artifacts in the repository that are considered old.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor" role-hint="old-snapshot-artifact"
+ * @todo make this configurable from the web interface
+ */
+public class OldSnapshotArtifactReportProcessor
+    implements ArtifactReportProcessor
+{
+    private static final String ROLE_HINT = "old-snapshot-artifact";
+
+    /**
+     * The maximum age of an artifact before it is reported old, specified in seconds. The default is 1 year.
+     *
+     * @plexus.configuration default-value="31536000"
+     */
+    private int maxAge;
+
+    /**
+     * The maximum number of snapshots to retain within a given version. The default is 0, which keeps all snapshots
+     * that are within the age limits.
+     *
+     * @plexus.configuration default-value="0"
+     */
+    private int maxSnapshots;
+
+    public void processArtifact( final Artifact artifact, Model model, ReportingDatabase reporter )
+    {
+        ArtifactRepository repository = artifact.getRepository();
+
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+            throw new UnsupportedOperationException(
+                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+        }
+
+        adjustDistributionArtifactHandler( artifact );
+
+        String artifactPath = repository.pathOf( artifact );
+
+        //get the location of the artifact itself
+        File file = new File( repository.getBasedir(), artifactPath );
+
+        if ( file.exists() )
+        {
+            if ( artifact.isSnapshot() )
+            {
+                Matcher m = Artifact.VERSION_FILE_PATTERN.matcher( artifact.getVersion() );
+                if ( m.matches() )
+                {
+                    long timestamp;
+                    try
+                    {
+                        timestamp = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).parse( m.group( 2 ) ).getTime();
+                    }
+                    catch ( ParseException e )
+                    {
+                        throw new IllegalStateException(
+                            "Shouldn't match timestamp pattern and not be able to parse it: " + m.group( 2 ) );
+                    }
+
+                    if ( System.currentTimeMillis() - timestamp > maxAge * 1000 )
+                    {
+                        addNotice( reporter, artifact, "snapshot-expired-time",
+                                   "The artifact is older than the maximum age of " + maxAge + " seconds." );
+                    }
+                    else if ( maxSnapshots > 0 )
+                    {
+                        File[] files = file.getParentFile().listFiles( new FilenameFilter()
+                        {
+                            public boolean accept( File file, String string )
+                            {
+                                return string.startsWith( artifact.getArtifactId() + "-" ) &&
+                                    string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
+                            }
+                        } );
+
+                        List/*<Integer>*/ buildNumbers = new ArrayList();
+                        Integer currentBuild = null;
+                        for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
+                        {
+                            File f = (File) i.next();
+
+                            // trim to version
+                            int startIndex = artifact.getArtifactId().length() + 1;
+                            int extensionLength = artifact.getArtifactHandler().getExtension().length() + 1;
+                            int endIndex = f.getName().length() - extensionLength;
+                            String name = f.getName().substring( startIndex, endIndex );
+
+                            Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( name );
+
+                            if ( matcher.matches() )
+                            {
+                                Integer buildNumber = Integer.valueOf( matcher.group( 3 ) );
+
+                                buildNumbers.add( buildNumber );
+                                if ( name.equals( artifact.getVersion() ) )
+                                {
+                                    currentBuild = buildNumber;
+                                }
+                            }
+                        }
+
+                        // Prune back to expired build numbers
+                        Collections.sort( buildNumbers );
+                        for ( int i = 0; i < maxSnapshots && !buildNumbers.isEmpty(); i++ )
+                        {
+                            buildNumbers.remove( buildNumbers.size() - 1 );
+                        }
+
+                        if ( buildNumbers.contains( currentBuild ) )
+                        {
+                            addNotice( reporter, artifact, "snapshot-expired-count",
+                                       "The artifact is older than the maximum number of retained snapshot builds." );
+                        }
+                    }
+                }
+            }
+        }
+        else
+        {
+            throw new IllegalStateException( "Couldn't find artifact " + file );
+        }
+    }
+
+    private static void addNotice( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+    {
+        // TODO: reason could be an i18n key derived from the processor and the problem ID and the
+        reporter.addNotice( artifact, ROLE_HINT, problem, reason );
+    }
+
+    private static void adjustDistributionArtifactHandler( Artifact artifact )
+    {
+        // need to tweak these as they aren't currently in the known type converters. TODO - add them in Maven
+        if ( "distribution-zip".equals( artifact.getType() ) )
+        {
+            artifact.setArtifactHandler( new DefaultArtifactHandler( "zip" ) );
+        }
+        else if ( "distribution-tgz".equals( artifact.getType() ) )
+        {
+            artifact.setArtifactHandler( new DefaultArtifactHandler( "tar.gz" ) );
+        }
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java
new file mode 100644 (file)
index 0000000..d1c1f55
--- /dev/null
@@ -0,0 +1,142 @@
+package org.apache.maven.archiva.reporting.store;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.Reporting;
+import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Reader;
+import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Writer;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.store.ReportingStore;
+import org.apache.maven.archiva.reporting.store.ReportingStoreException;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Load and store the reports. No synchronization is used, but it is unnecessary as the old object
+ * can continue to be used.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
+ * @todo support other implementations than XML file
+ * @plexus.component
+ */
+public class DefaultReportingStore
+    extends AbstractLogEnabled
+    implements ReportingStore
+{
+    /**
+     * The cached reports for given repositories.
+     */
+    private Map/*<String,ReportingDatabase>*/ reports = new HashMap();
+
+    /**
+     * Look up the reporting database for a repository/report-group pair, reading it from
+     * the on-disk XML file on first access and caching it thereafter. A missing file is
+     * not an error: it simply yields a fresh, empty database.
+     */
+    public ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
+        throws ReportingStoreException
+    {
+        String key = getKey( repository, reportGroup );
+        ReportingDatabase database = (ReportingDatabase) reports.get( key );
+
+        if ( database == null )
+        {
+            ReportingXpp3Reader reader = new ReportingXpp3Reader();
+
+            File file = getReportFilename( repository, reportGroup );
+
+            // NOTE(review): FileReader uses the platform default charset; the report file
+            // is XML and presumably UTF-8 — confirm and consider an explicit encoding.
+            FileReader fileReader = null;
+            try
+            {
+                fileReader = new FileReader( file );
+            }
+            catch ( FileNotFoundException e )
+            {
+                // No stored reports yet for this key - start with an empty database.
+                database = new ReportingDatabase( reportGroup, repository );
+            }
+
+            // Still null here means the file exists and fileReader is open: parse it.
+            if ( database == null )
+            {
+                getLogger().info( "Reading report database from " + file );
+                try
+                {
+                    Reporting reporting = reader.read( fileReader, false );
+                    database = new ReportingDatabase( reportGroup, reporting, repository );
+                }
+                catch ( IOException e )
+                {
+                    throw new ReportingStoreException( e.getMessage(), e );
+                }
+                catch ( XmlPullParserException e )
+                {
+                    throw new ReportingStoreException( e.getMessage(), e );
+                }
+                finally
+                {
+                    IOUtil.close( fileReader );
+                }
+            }
+
+            reports.put( key, database );
+        }
+        return database;
+    }
+
+    /**
+     * Cache key for a repository/report-group pair.
+     */
+    private static String getKey( ArtifactRepository repository, ReportGroup reportGroup )
+    {
+        return repository.getId() + "/" + reportGroup.getFilename();
+    }
+
+    /**
+     * Location of the report-group's XML file inside the repository's ".reports" directory.
+     */
+    private static File getReportFilename( ArtifactRepository repository, ReportGroup reportGroup )
+    {
+        return new File( repository.getBasedir(), ".reports/" + reportGroup.getFilename() );
+    }
+
+    /**
+     * Serialize the database back to its XML file, creating the ".reports" directory
+     * if necessary.
+     */
+    public void storeReports( ReportingDatabase database, ArtifactRepository repository )
+        throws ReportingStoreException
+    {
+        database.updateTimings();
+
+        ReportingXpp3Writer writer = new ReportingXpp3Writer();
+
+        File file = getReportFilename( repository, database.getReportGroup() );
+        getLogger().info( "Writing reports to " + file );
+        // NOTE(review): FileWriter uses the platform default charset - see read side above.
+        FileWriter fileWriter = null;
+        try
+        {
+            // mkdirs() result is ignored; a failure surfaces as the IOException below.
+            file.getParentFile().mkdirs();
+
+            fileWriter = new FileWriter( file );
+            writer.write( fileWriter, database.getReporting() );
+        }
+        catch ( IOException e )
+        {
+            throw new ReportingStoreException( e.getMessage(), e );
+        }
+        finally
+        {
+            IOUtil.close( fileWriter );
+        }
+    }
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java
new file mode 100644 (file)
index 0000000..b251736
--- /dev/null
@@ -0,0 +1,58 @@
+package org.apache.maven.archiva.reporting.store;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.store.ReportingStoreException;
+
+/**
+ * A component for loading the reporting database into the model.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo this is something that could possibly be generalised into Modello.
+ */
+public interface ReportingStore
+{
+    /**
+     * The Plexus role for the component.
+     */
+    String ROLE = ReportingStore.class.getName();
+
+    /**
+     * Get the reports from the store. A cached version may be used.
+     *
+     * @param repository  the repository to load the reports for
+     * @param reportGroup the report group to get the report for
+     * @return the reporting database
+     * @throws ReportingStoreException if there was a problem reading the store
+     */
+    ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
+        throws ReportingStoreException;
+
+    /**
+     * Save the reporting to the store.
+     *
+     * @param database   the reports to store
+     * @param repository the repository to store the reports in
+     * @throws ReportingStoreException if there was a problem writing the store
+     */
+    void storeReports( ReportingDatabase database, ArtifactRepository repository )
+        throws ReportingStoreException;
+
+}
diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java
new file mode 100644 (file)
index 0000000..84d6a3b
--- /dev/null
@@ -0,0 +1,36 @@
+package org.apache.maven.archiva.reporting.store;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception occurring using the reporting store.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class ReportingStoreException
+    extends Exception
+{
+    /**
+     * Construct an exception with a message and no underlying cause.
+     *
+     * @param message the detail message
+     */
+    public ReportingStoreException( String message )
+    {
+        super( message );
+    }
+
+    /**
+     * Construct an exception wrapping an underlying cause.
+     *
+     * @param message the detail message
+     * @param e       the cause
+     */
+    public ReportingStoreException( String message, Throwable e )
+    {
+        super( message, e );
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/AbstractChecksumArtifactReporterTestCase.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/AbstractChecksumArtifactReporterTestCase.java
deleted file mode 100644 (file)
index e7148fd..0000000
+++ /dev/null
@@ -1,283 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.digest.Digester;
-import org.codehaus.plexus.digest.DigesterException;
-import org.codehaus.plexus.util.FileUtils;
-import org.codehaus.plexus.util.IOUtil;
-
-import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.util.jar.JarEntry;
-import java.util.jar.JarOutputStream;
-
-/**
- * This class creates the artifact and metadata files used for testing the ChecksumArtifactReportProcessor.
- * It is extended by ChecksumArtifactReporterTest class.
- */
-public abstract class AbstractChecksumArtifactReporterTestCase
-    extends AbstractRepositoryReportsTestCase
-{
-    private static final String[] validArtifactChecksumJars = {"validArtifact-1.0"};
-
-    private static final String[] invalidArtifactChecksumJars = {"invalidArtifact-1.0"};
-
-    private static final String metadataChecksumFilename = "maven-metadata";
-
-    private Digester sha1Digest;
-
-    private Digester md5Digest;
-
-    public void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        sha1Digest = (Digester) lookup( Digester.ROLE, "sha1" );
-        md5Digest = (Digester) lookup( Digester.ROLE, "md5" );
-    }
-
-    /**
-     * Create checksum files.
-     *
-     * @param type The type of checksum file to be created.
-     */
-    protected void createChecksumFile( String type )
-        throws DigesterException, IOException
-    {
-        //loop through the valid artifact names..
-        if ( "VALID".equals( type ) )
-        {
-            for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
-            {
-                writeChecksumFile( "checksumTest/", validArtifactChecksumJars[i], "jar", true );
-            }
-        }
-        else if ( "INVALID".equals( type ) )
-        {
-            for ( int i = 0; i < invalidArtifactChecksumJars.length; i++ )
-            {
-                writeChecksumFile( "checksumTest/", invalidArtifactChecksumJars[i], "jar", false );
-            }
-        }
-    }
-
-    /**
-     * Create checksum files for metadata.
-     *
-     * @param type The type of checksum to be created. (Valid or invalid)
-     */
-    protected void createMetadataFile( String type )
-        throws DigesterException, IOException
-    {
-        //loop through the valid artifact names..
-        if ( "VALID".equals( type ) )
-        {
-            writeMetadataFile( "checksumTest/validArtifact/1.0/", metadataChecksumFilename, "xml", true );
-            writeMetadataFile( "checksumTest/validArtifact/", metadataChecksumFilename, "xml", true );
-            writeMetadataFile( "checksumTest/", metadataChecksumFilename, "xml", true );
-        }
-        else if ( "INVALID".equals( type ) )
-        {
-            writeMetadataFile( "checksumTest/invalidArtifact/1.0/", metadataChecksumFilename, "xml", false );
-        }
-    }
-
-    /**
-     * Create artifact together with its checksums.
-     *
-     * @param relativePath The groupId
-     * @param filename     The filename of the artifact to be created.
-     * @param type         The file type (JAR)
-     * @param isValid      Indicates whether the checksum to be created is valid or not.
-     */
-    private void writeChecksumFile( String relativePath, String filename, String type, boolean isValid )
-        throws IOException, DigesterException
-    {
-        //Initialize variables for creating jar files
-        String repoUrl = repository.getBasedir();
-
-        String dirs = filename.replace( '-', '/' );
-        //create the group level directory of the artifact
-        File dirFiles = new File( repoUrl + relativePath + dirs );
-
-        if ( dirFiles.mkdirs() )
-        {
-            // create a jar file
-            String path = repoUrl + relativePath + dirs + "/" + filename + "." + type;
-            FileOutputStream f = new FileOutputStream( path );
-            JarOutputStream out = new JarOutputStream( new BufferedOutputStream( f ) );
-
-            // jar sample.txt
-            String filename1 = repoUrl + relativePath + dirs + "/sample.txt";
-            createSampleFile( filename1 );
-
-            BufferedReader in = new BufferedReader( new FileReader( filename1 ) );
-            out.putNextEntry( new JarEntry( filename1 ) );
-            IOUtil.copy( in, out );
-            in.close();
-            out.close();
-
-            //Create md5 and sha-1 checksum files..
-
-            File file = new File( path + ".md5" );
-            OutputStream os = new FileOutputStream( file );
-            OutputStreamWriter osw = new OutputStreamWriter( os );
-            String sum = md5Digest.calc( new File( path ) );
-            if ( !isValid )
-            {
-                osw.write( sum + "1" );
-            }
-            else
-            {
-                osw.write( sum );
-            }
-            osw.close();
-
-            file = new File( path + ".sha1" );
-            os = new FileOutputStream( file );
-            osw = new OutputStreamWriter( os );
-            String sha1sum = sha1Digest.calc( new File( path ) );
-            if ( !isValid )
-            {
-                osw.write( sha1sum + "2" );
-            }
-            else
-            {
-                osw.write( sha1sum );
-            }
-            osw.close();
-        }
-    }
-
-    /**
-     * Create metadata file together with its checksums.
-     *
-     * @param relativePath The groupId
-     * @param filename     The filename of the artifact to be created.
-     * @param type         The file type (JAR)
-     * @param isValid      Indicates whether the checksum to be created is valid or not.
-     */
-    private void writeMetadataFile( String relativePath, String filename, String type, boolean isValid )
-        throws IOException, DigesterException
-    {
-        //create checksum for the metadata file..
-        String repoUrl = repository.getBasedir();
-        String url = repository.getBasedir() + "/" + filename + "." + type;
-
-        String path = repoUrl + relativePath + filename + "." + type;
-        FileUtils.copyFile( new File( url ), new File( path ) );
-
-        //Create md5 and sha-1 checksum files..
-        File file = new File( path + ".md5" );
-        OutputStream os = new FileOutputStream( file );
-        OutputStreamWriter osw = new OutputStreamWriter( os );
-        String md5sum = md5Digest.calc( new File( path ) );
-        if ( !isValid )
-        {
-            osw.write( md5sum + "1" );
-        }
-        else
-        {
-            osw.write( md5sum );
-        }
-        osw.close();
-
-        file = new File( path + ".sha1" );
-        os = new FileOutputStream( file );
-        osw = new OutputStreamWriter( os );
-        String sha1sum = sha1Digest.calc( new File( path ) );
-        if ( !isValid )
-        {
-            osw.write( sha1sum + "2" );
-        }
-        else
-        {
-            osw.write( sha1sum );
-        }
-        osw.close();
-    }
-
-    /**
-     * Create the sample file that will be included in the jar.
-     *
-     * @param filename
-     */
-    private void createSampleFile( String filename )
-        throws IOException
-    {
-        File file = new File( filename );
-        OutputStream os = new FileOutputStream( file );
-        OutputStreamWriter osw = new OutputStreamWriter( os );
-        osw.write( "This is the content of the sample file that will be included in the jar file." );
-        osw.close();
-    }
-
-    /**
-     * Delete the test directory created in the repository.
-     *
-     * @param dir The directory to be deleted.
-     */
-    protected void deleteTestDirectory( File dir )
-    {
-        try
-        {
-            FileUtils.deleteDirectory( dir );
-        }
-        catch ( IOException e )
-        {
-            // ignore
-        }
-    }
-
-    private void deleteFile( String filename )
-    {
-        File f = new File( filename );
-        f.delete();
-    }
-
-    protected void deleteChecksumFiles( String type )
-    {
-        //delete valid checksum files of artifacts created
-        for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
-        {
-            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
-                "/" + validArtifactChecksumJars[i] + "." + type + ".md5" );
-
-            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
-                "/" + validArtifactChecksumJars[i] + "." + type + ".sha1" );
-        }
-
-        //delete valid checksum files of metadata file
-        for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
-        {
-            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
-                "/" + metadataChecksumFilename + ".xml.md5" );
-
-            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
-                "/" + metadataChecksumFilename + ".xml.sha1" );
-        }
-    }
-
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/BadMetadataReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/BadMetadataReportProcessorTest.java
deleted file mode 100644 (file)
index cd9400a..0000000
+++ /dev/null
@@ -1,433 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Plugin;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-
-import java.util.Iterator;
-
-public class BadMetadataReportProcessorTest
-    extends AbstractRepositoryReportsTestCase
-{
-    private ArtifactFactory artifactFactory;
-
-    private MetadataReportProcessor badMetadataReportProcessor;
-
-    private ReportingDatabase reportingDatabase;
-
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
-    }
-
-    public void testMetadataMissingLastUpdated()
-    {
-        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        Versioning versioning = new Versioning();
-        versioning.addVersion( "1.0-alpha-1" );
-        versioning.addVersion( "1.0-alpha-2" );
-
-        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertMetadata( metadata, results );
-        assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testMetadataMissingVersioning()
-    {
-        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, null );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        assertMetadata( metadata, results );
-        Result result = (Result) failures.next();
-        assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
-        result = (Result) failures.next();
-        boolean alpha1First = false;
-        if ( result.getReason().indexOf( "alpha-1" ) > 0 )
-        {
-            alpha1First = true;
-        }
-        if ( alpha1First )
-        {
-            assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
-                          result.getReason() );
-        }
-        else
-        {
-            assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                          result.getReason() );
-        }
-        result = (Result) failures.next();
-        if ( !alpha1First )
-        {
-            assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
-                          result.getReason() );
-        }
-        else
-        {
-            assertEquals( "check reason",
-                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                          result.getReason() );
-        }
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testSnapshotMetadataMissingVersioning()
-    {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        assertMetadata( metadata, results );
-        Result result = (Result) failures.next();
-        assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testMetadataValidVersions()
-    {
-        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        Versioning versioning = new Versioning();
-        versioning.addVersion( "1.0-alpha-1" );
-        versioning.addVersion( "1.0-alpha-2" );
-        versioning.setLastUpdated( "20050611.202020" );
-
-        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertFalse( "check there are no failures", failures.hasNext() );
-    }
-
-    public void testMetadataMissingADirectory()
-    {
-        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        Versioning versioning = new Versioning();
-        versioning.addVersion( "1.0-alpha-1" );
-        versioning.setLastUpdated( "20050611.202020" );
-
-        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertMetadata( metadata, results );
-        // TODO: should be more robust
-        assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                      result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testMetadataInvalidArtifactVersion()
-    {
-        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        Versioning versioning = new Versioning();
-        versioning.addVersion( "1.0-alpha-1" );
-        versioning.addVersion( "1.0-alpha-2" );
-        versioning.addVersion( "1.0-alpha-3" );
-        versioning.setLastUpdated( "20050611.202020" );
-
-        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertMetadata( metadata, results );
-        // TODO: should be more robust
-        assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
-                      result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testMoreThanOneMetadataVersionErrors()
-    {
-        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        Versioning versioning = new Versioning();
-        versioning.addVersion( "1.0-alpha-1" );
-        versioning.addVersion( "1.0-alpha-3" );
-        versioning.setLastUpdated( "20050611.202020" );
-
-        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertMetadata( metadata, results );
-        // TODO: should be more robust
-        assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
-                      result.getReason() );
-        assertTrue( "check there is a 2nd failure", failures.hasNext() );
-        result = (Result) failures.next();
-        // TODO: should be more robust
-        assertEquals( "check reason",
-                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
-                      result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testValidPluginMetadata()
-    {
-        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertFalse( "check there are no failures", failures.hasNext() );
-    }
-
-    public void testMissingMetadataPlugin()
-    {
-        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "missing-plugin", "default3" ) );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        // TODO: should be more robust
-        assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository",
-                      result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testIncompletePluginMetadata()
-    {
-        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        // TODO: should be more robust
-        assertEquals( "check reason",
-                      "Plugin snapshot-artifact is present in the repository but " + "missing in the metadata.",
-                      result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testInvalidPluginArtifactId()
-    {
-        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( null, "default3" ) );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "", "default4" ) );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3",
-                      result.getReason() );
-        assertTrue( "check there is a 2nd failure", failures.hasNext() );
-        result = (Result) failures.next();
-        // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4",
-                      result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testInvalidPluginPrefix()
-    {
-        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "" ) );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty plugin prefix for artifactId artifactId.", result.getReason() );
-        assertTrue( "check there is a 2nd failure", failures.hasNext() );
-        result = (Result) failures.next();
-        // TODO: should be more robust
-        assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.",
-                      result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testDuplicatePluginPrefixes()
-    {
-        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
-        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default" ) );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        // TODO: should be more robust
-        assertEquals( "check reason", "Duplicate plugin prefix found: default.", result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    public void testValidSnapshotMetadata()
-    {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
-
-        Snapshot snapshot = new Snapshot();
-        snapshot.setBuildNumber( 1 );
-        snapshot.setTimestamp( "20050611.202024" );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertFalse( "check there are no failures", failures.hasNext() );
-    }
-
-    public void testInvalidSnapshotMetadata()
-    {
-        Artifact artifact =
-            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
-
-        Snapshot snapshot = new Snapshot();
-        snapshot.setBuildNumber( 2 );
-        snapshot.setTimestamp( "20050611.202024" );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
-
-        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertMetadata( metadata, results );
-        // TODO: should be more robust
-        assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.",
-                      result.getReason() );
-        assertFalse( "check no more failures", failures.hasNext() );
-    }
-
-    private static void assertMetadata( RepositoryMetadata metadata, MetadataResults results )
-    {
-        assertEquals( "check metadata", metadata.getGroupId(), results.getGroupId() );
-        assertEquals( "check metadata", metadata.getArtifactId(), results.getArtifactId() );
-        assertEquals( "check metadata", metadata.getBaseVersion(), results.getVersion() );
-    }
-
-    private Plugin createMetadataPlugin( String artifactId, String prefix )
-    {
-        Plugin plugin = new Plugin();
-        plugin.setArtifactId( artifactId );
-        plugin.setName( artifactId );
-        plugin.setPrefix( prefix );
-        return plugin;
-    }
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/ChecksumArtifactReporterTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/ChecksumArtifactReporterTest.java
deleted file mode 100644 (file)
index 0830448..0000000
+++ /dev/null
@@ -1,156 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.codehaus.plexus.digest.DigesterException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-
-/**
- * This class tests the ChecksumArtifactReportProcessor.
- * It extends the AbstractChecksumArtifactReporterTestCase class.
- */
-public class ChecksumArtifactReporterTest
-    extends AbstractChecksumArtifactReporterTestCase
-{
-    private ArtifactReportProcessor artifactReportProcessor;
-
-    private ReportingDatabase reportingDatabase;
-
-    private MetadataReportProcessor metadataReportProcessor;
-
-    public void setUp()
-        throws Exception
-    {
-        super.setUp();
-        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
-        metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
-    }
-
-    /**
-     * Test the ChecksumArtifactReportProcessor when the checksum files are valid.
-     */
-    public void testChecksumArtifactReporterSuccess()
-        throws DigesterException, IOException
-    {
-        createChecksumFile( "VALID" );
-        createChecksumFile( "INVALID" );
-
-        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the ChecksumArtifactReportProcessor when the checksum files are invalid.
-     */
-    public void testChecksumArtifactReporterFailed()
-    {
-        String s = "invalidArtifact";
-        String s1 = "1.0";
-        Artifact artifact = createArtifact( "checksumTest", s, s1 );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the valid checksum of a metadata file.
-     * The reportingDatabase should report 2 success validation.
-     */
-    public void testChecksumMetadataReporterSuccess()
-        throws DigesterException, IOException
-    {
-        createMetadataFile( "VALID" );
-        createMetadataFile( "INVALID" );
-
-        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
-        //Version level metadata
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        //Artifact level metadata
-        metadata = new ArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        //Group level metadata
-        metadata = new GroupRepositoryMetadata( "checksumTest" );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-    }
-
-    /**
-     * Test the corrupted checksum of a metadata file.
-     * The reportingDatabase must report 2 failures.
-     */
-    public void testChecksumMetadataReporterFailure()
-    {
-        Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-    }
-
-    /**
-     * Test the conditional when the checksum files of the artifact & metadata do not exist.
-     */
-    public void testChecksumFilesDoNotExist()
-        throws DigesterException, IOException
-    {
-        createChecksumFile( "VALID" );
-        createMetadataFile( "VALID" );
-        deleteChecksumFiles( "jar" );
-
-        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-
-        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-
-        deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
-    }
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DefaultArtifactReporterTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DefaultArtifactReporterTest.java
deleted file mode 100644 (file)
index 07d44fd..0000000
+++ /dev/null
@@ -1,365 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-
-import java.util.Iterator;
-
-/**
- *
- */
-public class DefaultArtifactReporterTest
-    extends AbstractRepositoryReportsTestCase
-{
-    private ReportingDatabase reportingDatabase;
-
-    private RepositoryMetadata metadata;
-
-    private static final String PROCESSOR = "processor";
-
-    private static final String PROBLEM = "problem";
-
-    private Artifact artifact;
-
-    public void testEmptyArtifactReporter()
-    {
-        assertEquals( "No failures", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "No warnings", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-        assertFalse( "No artifact failures", reportingDatabase.getArtifactIterator().hasNext() );
-        assertFalse( "No metadata failures", reportingDatabase.getMetadataIterator().hasNext() );
-    }
-
-    public void testMetadataSingleFailure()
-    {
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
-        assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertMetadata( results );
-        assertEquals( "check failure reason", "Single Failure Reason", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more failures", failures.hasNext() );
-    }
-
-    private void assertMetadata( MetadataResults result )
-    {
-        assertEquals( "check failure cause", metadata.getGroupId(), result.getGroupId() );
-        assertEquals( "check failure cause", metadata.getArtifactId(), result.getArtifactId() );
-        assertEquals( "check failure cause", metadata.getBaseVersion(), result.getVersion() );
-    }
-
-    public void testMetadataMultipleFailures()
-    {
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
-        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
-        assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        MetadataResults results = (MetadataResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertMetadata( results );
-        assertEquals( "check failure reason", "First Failure Reason", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertTrue( "must have 2nd failure", failures.hasNext() );
-        result = (Result) failures.next();
-        assertEquals( "check failure reason", "Second Failure Reason", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more failures", failures.hasNext() );
-    }
-
-    public void testMetadataSingleWarning()
-    {
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-
-        Iterator warnings = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        MetadataResults results = (MetadataResults) warnings.next();
-        warnings = results.getWarnings().iterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        Result result = (Result) warnings.next();
-        assertMetadata( results );
-        assertEquals( "check failure reason", "Single Warning Message", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more warnings", warnings.hasNext() );
-    }
-
-    public void testMetadataMultipleWarnings()
-    {
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
-        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-
-        Iterator warnings = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        MetadataResults results = (MetadataResults) warnings.next();
-        warnings = results.getWarnings().iterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        Result result = (Result) warnings.next();
-        assertMetadata( results );
-        assertEquals( "check failure reason", "First Warning", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertTrue( "must have 2nd warning", warnings.hasNext() );
-        result = (Result) warnings.next();
-        assertEquals( "check failure reason", "Second Warning", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more warnings", warnings.hasNext() );
-    }
-
-    public void testMetadataSingleNotice()
-    {
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
-        assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
-
-        Iterator warnings = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        MetadataResults results = (MetadataResults) warnings.next();
-        warnings = results.getNotices().iterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        Result result = (Result) warnings.next();
-        assertMetadata( results );
-        assertEquals( "check failure reason", "Single Notice Message", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more warnings", warnings.hasNext() );
-    }
-
-    public void testMetadataMultipleNotices()
-    {
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
-        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
-
-        Iterator warnings = reportingDatabase.getMetadataIterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        MetadataResults results = (MetadataResults) warnings.next();
-        warnings = results.getNotices().iterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        Result result = (Result) warnings.next();
-        assertMetadata( results );
-        assertEquals( "check failure reason", "First Notice", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertTrue( "must have 2nd warning", warnings.hasNext() );
-        result = (Result) warnings.next();
-        assertEquals( "check failure reason", "Second Notice", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more warnings", warnings.hasNext() );
-    }
-
-    public void testArtifactSingleFailure()
-    {
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
-        assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        ArtifactResults results = (ArtifactResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertArtifact( results );
-        assertEquals( "check failure reason", "Single Failure Reason", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more failures", failures.hasNext() );
-    }
-
-    private void assertArtifact( ArtifactResults results )
-    {
-        assertEquals( "check failure cause", artifact.getGroupId(), results.getGroupId() );
-        assertEquals( "check failure cause", artifact.getArtifactId(), results.getArtifactId() );
-        assertEquals( "check failure cause", artifact.getVersion(), results.getVersion() );
-        assertEquals( "check failure cause", artifact.getClassifier(), results.getClassifier() );
-        assertEquals( "check failure cause", artifact.getType(), results.getType() );
-    }
-
-    public void testArtifactMultipleFailures()
-    {
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
-        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
-        assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        ArtifactResults results = (ArtifactResults) failures.next();
-        failures = results.getFailures().iterator();
-        assertTrue( "check there is a failure", failures.hasNext() );
-        Result result = (Result) failures.next();
-        assertArtifact( results );
-        assertEquals( "check failure reason", "First Failure Reason", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertTrue( "must have 2nd failure", failures.hasNext() );
-        result = (Result) failures.next();
-        assertEquals( "check failure reason", "Second Failure Reason", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more failures", failures.hasNext() );
-    }
-
-    public void testArtifactSingleWarning()
-    {
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-
-        Iterator warnings = reportingDatabase.getArtifactIterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        ArtifactResults results = (ArtifactResults) warnings.next();
-        warnings = results.getWarnings().iterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        Result result = (Result) warnings.next();
-        assertArtifact( results );
-        assertEquals( "check failure reason", "Single Warning Message", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more warnings", warnings.hasNext() );
-    }
-
-    public void testArtifactMultipleWarnings()
-    {
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
-        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
-
-        Iterator warnings = reportingDatabase.getArtifactIterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        ArtifactResults results = (ArtifactResults) warnings.next();
-        warnings = results.getWarnings().iterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        Result result = (Result) warnings.next();
-        assertArtifact( results );
-        assertEquals( "check failure reason", "First Warning", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertTrue( "must have 2nd warning", warnings.hasNext() );
-        result = (Result) warnings.next();
-        assertEquals( "check failure reason", "Second Warning", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more warnings", warnings.hasNext() );
-    }
-
-    public void testArtifactSingleNotice()
-    {
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
-        assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
-
-        Iterator warnings = reportingDatabase.getArtifactIterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        ArtifactResults results = (ArtifactResults) warnings.next();
-        warnings = results.getNotices().iterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        Result result = (Result) warnings.next();
-        assertArtifact( results );
-        assertEquals( "check failure reason", "Single Notice Message", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more warnings", warnings.hasNext() );
-    }
-
-    public void testArtifactMultipleNotices()
-    {
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
-        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
-        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
-        assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
-
-        Iterator warnings = reportingDatabase.getArtifactIterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        ArtifactResults results = (ArtifactResults) warnings.next();
-        warnings = results.getNotices().iterator();
-        assertTrue( "check there is a failure", warnings.hasNext() );
-        Result result = (Result) warnings.next();
-        assertArtifact( results );
-        assertEquals( "check failure reason", "First Notice", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertTrue( "must have 2nd warning", warnings.hasNext() );
-        result = (Result) warnings.next();
-        assertEquals( "check failure reason", "Second Notice", result.getReason() );
-        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
-        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
-        assertFalse( "no more warnings", warnings.hasNext() );
-    }
-
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
-        Versioning versioning = new Versioning();
-        versioning.addVersion( "1.0-alpha-1" );
-        versioning.addVersion( "1.0-alpha-2" );
-
-        metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
-    }
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DependencyArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DependencyArtifactReportProcessorTest.java
deleted file mode 100644 (file)
index 50e58fd..0000000
+++ /dev/null
@@ -1,302 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.model.Dependency;
-import org.apache.maven.model.Model;
-
-import java.util.Iterator;
-
-/**
- *
- */
-public class DependencyArtifactReportProcessorTest
-    extends AbstractRepositoryReportsTestCase
-{
-    private static final String VALID_GROUP_ID = "groupId";
-
-    private static final String VALID_ARTIFACT_ID = "artifactId";
-
-    private static final String VALID_VERSION = "1.0-alpha-1";
-
-    private ReportingDatabase reportingDatabase;
-
-    private Model model;
-
-    private ArtifactReportProcessor processor;
-
-    private ArtifactFactory artifactFactory;
-
-    private static final String INVALID = "invalid";
-
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-        model = new Model();
-        processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
-
-        artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportingDatabase = new ReportingDatabase( reportGroup );
-    }
-
-    public void testArtifactFoundButNoDirectDependencies()
-    {
-        Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-    }
-
-    private Artifact createValidArtifact()
-    {
-        Artifact projectArtifact =
-            artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
-        projectArtifact.setRepository( repository );
-        return projectArtifact;
-    }
-
-    public void testArtifactNotFound()
-    {
-        Artifact artifact = artifactFactory.createProjectArtifact( INVALID, INVALID, INVALID );
-        artifact.setRepository( repository );
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        ArtifactResults results = (ArtifactResults) failures.next();
-        assertFalse( failures.hasNext() );
-        failures = results.getFailures().iterator();
-        Result result = (Result) failures.next();
-        assertEquals( "Artifact does not exist in the repository", result.getReason() );
-    }
-
-    public void testValidArtifactWithNullDependency()
-    {
-        Artifact artifact = createValidArtifact();
-
-        Dependency dependency = createValidDependency();
-        model.addDependency( dependency );
-
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-    }
-
-    private Dependency createValidDependency()
-    {
-        return createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
-    }
-
-    public void testValidArtifactWithValidSingleDependency()
-    {
-        Artifact artifact = createValidArtifact();
-
-        Dependency dependency = createValidDependency();
-        model.addDependency( dependency );
-
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-    }
-
-    public void testValidArtifactWithValidMultipleDependencies()
-    {
-        Dependency dependency = createValidDependency();
-        model.addDependency( dependency );
-        model.addDependency( dependency );
-        model.addDependency( dependency );
-        model.addDependency( dependency );
-        model.addDependency( dependency );
-
-        Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-    }
-
-    public void testValidArtifactWithAnInvalidDependency()
-    {
-        Dependency dependency = createValidDependency();
-        model.addDependency( dependency );
-        model.addDependency( dependency );
-        model.addDependency( dependency );
-        model.addDependency( dependency );
-        model.addDependency( createDependency( INVALID, INVALID, INVALID ) );
-
-        Artifact artifact = createValidArtifact();
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        ArtifactResults results = (ArtifactResults) failures.next();
-        assertFalse( failures.hasNext() );
-        failures = results.getFailures().iterator();
-        Result result = (Result) failures.next();
-        assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ),
-                      result.getReason() );
-    }
-
-    public void testValidArtifactWithInvalidDependencyGroupId()
-    {
-        Artifact artifact = createValidArtifact();
-
-        Dependency dependency = createDependency( INVALID, VALID_ARTIFACT_ID, VALID_VERSION );
-        model.addDependency( dependency );
-
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        ArtifactResults results = (ArtifactResults) failures.next();
-        assertFalse( failures.hasNext() );
-        failures = results.getFailures().iterator();
-        Result result = (Result) failures.next();
-        assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
-    }
-
-    private Dependency createDependency( String o, String valid, String s )
-    {
-        Dependency dependency = new Dependency();
-        dependency.setGroupId( o );
-        dependency.setArtifactId( valid );
-        dependency.setVersion( s );
-        return dependency;
-    }
-
-    public void testValidArtifactWithInvalidDependencyArtifactId()
-    {
-        Artifact artifact = createValidArtifact();
-
-        Dependency dependency = createDependency( VALID_GROUP_ID, INVALID, VALID_VERSION );
-        model.addDependency( dependency );
-
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        ArtifactResults results = (ArtifactResults) failures.next();
-        assertFalse( failures.hasNext() );
-        failures = results.getFailures().iterator();
-        Result result = (Result) failures.next();
-        assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
-    }
-
-    public void testValidArtifactWithIncorrectDependencyVersion()
-    {
-        Artifact artifact = createValidArtifact();
-
-        Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, INVALID );
-        model.addDependency( dependency );
-
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        ArtifactResults results = (ArtifactResults) failures.next();
-        assertFalse( failures.hasNext() );
-        failures = results.getFailures().iterator();
-        Result result = (Result) failures.next();
-        assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
-    }
-
-    public void testValidArtifactWithInvalidDependencyVersion()
-    {
-        Artifact artifact = createValidArtifact();
-
-        Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[" );
-        model.addDependency( dependency );
-
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        ArtifactResults results = (ArtifactResults) failures.next();
-        assertFalse( failures.hasNext() );
-        failures = results.getFailures().iterator();
-        Result result = (Result) failures.next();
-        assertEquals( getDependencyVersionInvalidMessage( dependency, "[" ), result.getReason() );
-    }
-
-    public void testValidArtifactWithInvalidDependencyVersionRange()
-    {
-        Artifact artifact = createValidArtifact();
-
-        Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[1.0,)" );
-        model.addDependency( dependency );
-
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 0, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-    }
-
-    public void testValidArtifactWithMissingDependencyVersion()
-    {
-        Artifact artifact = createValidArtifact();
-
-        Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, null );
-        model.addDependency( dependency );
-
-        processor.processArtifact( artifact, model, reportingDatabase );
-        assertEquals( 1, reportingDatabase.getNumFailures() );
-        assertEquals( 0, reportingDatabase.getNumWarnings() );
-        assertEquals( 0, reportingDatabase.getNumNotices() );
-
-        Iterator failures = reportingDatabase.getArtifactIterator();
-        ArtifactResults results = (ArtifactResults) failures.next();
-        assertFalse( failures.hasNext() );
-        failures = results.getFailures().iterator();
-        Result result = (Result) failures.next();
-        assertEquals( getDependencyVersionInvalidMessage( dependency, null ), result.getReason() );
-    }
-
-    private String getDependencyVersionInvalidMessage( Dependency dependency, String version )
-    {
-        return "Artifact's dependency " + getDependencyString( dependency ) + " contains an invalid version " + version;
-    }
-
-    private static String getDependencyString( Dependency dependency )
-    {
-        return DependencyArtifactReportProcessor.getDependencyString( dependency );
-    }
-
-    private String getDependencyNotFoundMessage( Dependency dependency )
-    {
-        return "Artifact's dependency " + getDependencyString( dependency ) + " does not exist in the repository";
-    }
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessorTest.java
deleted file mode 100644 (file)
index 3ad4bad..0000000
+++ /dev/null
@@ -1,144 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.model.Model;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.Collections;
-
-/**
- * @author Edwin Punzalan
- */
-public class DuplicateArtifactFileReportProcessorTest
-    extends AbstractRepositoryReportsTestCase
-{
-    private Artifact artifact;
-
-    private Model model;
-
-    private ArtifactReportProcessor processor;
-
-    private ArtifactFactory artifactFactory;
-
-    File indexDirectory;
-
-    private ReportingDatabase reportDatabase;
-
-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-
-        indexDirectory = getTestFile( "target/indexDirectory" );
-        FileUtils.deleteDirectory( indexDirectory );
-
-        artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-        artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "jar" );
-        model = new Model();
-
-        RepositoryArtifactIndexFactory factory =
-            (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
-        RepositoryArtifactIndex index = factory.createStandardIndex( indexDirectory );
-
-        RepositoryIndexRecordFactory recordFactory =
-            (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
-
-        index.indexRecords( Collections.singletonList( recordFactory.createRecord( artifact ) ) );
-
-        processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "duplicate" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
-    }
-
-    public void testNullArtifactFile()
-        throws Exception
-    {
-        artifact.setFile( null );
-
-        processor.processArtifact( artifact, model, reportDatabase );
-
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 1, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
-    }
-
-    public void testSuccessOnAlreadyIndexedArtifact()
-        throws Exception
-    {
-        processor.processArtifact( artifact, model, reportDatabase );
-
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
-    }
-
-    public void testSuccessOnDifferentGroupId()
-        throws Exception
-    {
-        artifact.setGroupId( "different.groupId" );
-        processor.processArtifact( artifact, model, reportDatabase );
-
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
-    }
-
-    public void testSuccessOnNewArtifact()
-        throws Exception
-    {
-        Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
-
-        processor.processArtifact( newArtifact, model, reportDatabase );
-
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
-    }
-
-    public void testFailure()
-        throws Exception
-    {
-        Artifact duplicate = createArtifact( artifact.getGroupId(), "snapshot-artifact", "1.0-alpha-1-SNAPSHOT",
-                                             artifact.getVersion(), artifact.getType() );
-        duplicate.setFile( artifact.getFile() );
-
-        processor.processArtifact( duplicate, model, reportDatabase );
-
-        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-        assertEquals( "Check no failures", 1, reportDatabase.getNumFailures() );
-    }
-
-    private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
-                                     String type )
-    {
-        Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
-        artifact.setBaseVersion( baseVersion );
-        artifact.setRepository( repository );
-        artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
-        return artifact;
-    }
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/InvalidPomArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/InvalidPomArtifactReportProcessorTest.java
deleted file mode 100644 (file)
index afea347..0000000
+++ /dev/null
@@ -1,79 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-/**
- * This class tests the InvalidPomArtifactReportProcessor class.
- */
-public class InvalidPomArtifactReportProcessorTest
-    extends AbstractRepositoryReportsTestCase
-{
-    private ArtifactReportProcessor artifactReportProcessor;
-
-    private ReportingDatabase reportDatabase;
-
-    public void setUp()
-        throws Exception
-    {
-        super.setUp();
-        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "invalid-pom" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
-    }
-
-    /**
-     * Test the InvalidPomArtifactReportProcessor when the artifact is an invalid pom.
-     */
-    public void testInvalidPomArtifactReportProcessorFailure()
-    {
-        Artifact artifact = createArtifact( "org.apache.maven", "artifactId", "1.0-alpha-3", "pom" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
-    }
-
-
-    /**
-     * Test the InvalidPomArtifactReportProcessor when the artifact is a valid pom.
-     */
-    public void testInvalidPomArtifactReportProcessorSuccess()
-    {
-        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-
-    /**
-     * Test the InvalidPomArtifactReportProcessor when the artifact is not a pom.
-     */
-    public void testNotAPomArtifactReportProcessorSuccess()
-    {
-        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "jar" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/LocationArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/LocationArtifactReportProcessorTest.java
deleted file mode 100644 (file)
index 3b6f6df..0000000
+++ /dev/null
@@ -1,224 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.model.Model;
-import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-
-/**
- * This class tests the LocationArtifactReportProcessor.
- */
-public class LocationArtifactReportProcessorTest
-    extends AbstractRepositoryReportsTestCase
-{
-    private ArtifactReportProcessor artifactReportProcessor;
-
-    private ReportingDatabase reportDatabase;
-
-    public void setUp()
-        throws Exception
-    {
-        super.setUp();
-        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "artifact-location" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
-        reportDatabase = new ReportingDatabase( reportGroup );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact's physical location matches the location specified
-     * both in the file system pom and in the pom included in the package.
-     */
-    public void testPackagedPomLocationArtifactReporterSuccess()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact is in the location specified in the
-     * file system pom (but the jar file does not have a pom included in its package).
-     */
-    public void testLocationArtifactReporterSuccess()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
-        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
-
-        Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact is in the location specified in the
-     * file system pom, but the pom itself is passed in.
-     */
-    public void testLocationArtifactReporterSuccessPom()
-        throws IOException, XmlPullParserException
-    {
-        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
-
-        Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( pomArtifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact is in the location specified in the
-     * file system pom, with a classifier.
-     */
-    public void testLocationArtifactReporterSuccessClassifier()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "java-source" );
-        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
-
-        Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact is in the location specified in the
-     * file system pom, with a classifier.
-     */
-    public void testLocationArtifactReporterSuccessZip()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact =
-            createArtifactWithClassifier( "groupId", "artifactId", "1.0-alpha-1", "distribution-zip", "src" );
-        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
-
-        Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact is in the location specified in the
-     * file system pom, with a classifier.
-     */
-    public void testLocationArtifactReporterSuccessTgz()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact =
-            createArtifactWithClassifier( "groupId", "artifactId", "1.0-alpha-1", "distribution-tgz", "src" );
-        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
-
-        Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact is not in the location specified
-     * in the file system pom.
-     */
-    public void testLocationArtifactReporterFailure()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2" );
-        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
-
-        Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        
-        assertEquals( 1, reportDatabase.getNumFailures() );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact's physical location does not match the
-     * location in the file system pom but instead matches the specified location in the packaged pom.
-     */
-    public void testFsPomArtifactMatchFailure()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0" );
-
-        Artifact pomArtifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0", "pom" );
-        Model model = readPom( repository.pathOf( pomArtifact ) );
-        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
-    }
-
-    private Model readPom( String path )
-        throws IOException, XmlPullParserException
-    {
-        Reader reader = new FileReader( new File( repository.getBasedir(), path ) );
-        Model model = new MavenXpp3Reader().read( reader );
-        // hokey inheritence to avoid some errors right now
-        if ( model.getGroupId() == null )
-        {
-            model.setGroupId( model.getParent().getGroupId() );
-        }
-        if ( model.getVersion() == null )
-        {
-            model.setVersion( model.getParent().getVersion() );
-        }
-        return model;
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact's physical location does not match the
-     * location specified in the packaged pom but matches the location specified in the file system pom.
-     */
-    public void testPkgPomArtifactMatchFailure()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact = createArtifact( "org.apache.maven", "maven-monitor", "2.1" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
-    }
-
-    /**
-     * Test the LocationArtifactReporter when the artifact's physical location does not match both the
-     * location specified in the packaged pom and the location specified in the file system pom.
-     */
-    public void testBothPomArtifactMatchFailure()
-        throws IOException, XmlPullParserException
-    {
-        Artifact artifact = createArtifact( "org.apache.maven", "maven-project", "2.1" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 1, reportDatabase.getNumFailures() );
-    }
-
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/OldArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/OldArtifactReportProcessorTest.java
deleted file mode 100644 (file)
index f1c5073..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.Iterator;
-
-/**
- * This class tests the OldArtifactReportProcessor.
- */
-public class OldArtifactReportProcessorTest
-    extends AbstractRepositoryReportsTestCase
-{
-    private ArtifactReportProcessor artifactReportProcessor;
-
-    private ReportingDatabase reportDatabase;
-
-    public void setUp()
-        throws Exception
-    {
-        super.setUp();
-        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-artifact" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
-        reportDatabase = new ReportingDatabase( reportGroup );
-    }
-
-    public void testOldArtifact()
-    {
-        Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
-        ArtifactResults results = (ArtifactResults) reportDatabase.getArtifactIterator().next();
-        assertEquals( artifact.getArtifactId(), results.getArtifactId() );
-        assertEquals( artifact.getGroupId(), results.getGroupId() );
-        assertEquals( artifact.getVersion(), results.getVersion() );
-        assertEquals( 1, results.getNotices().size() );
-        Iterator i = results.getNotices().iterator();
-        Result result = (Result) i.next();
-        assertEquals( "old-artifact", result.getProcessor() );
-    }
-
-    public void testNewArtifact()
-        throws Exception
-    {
-        File repository = getTestFile( "target/test-repository" );
-
-        FileUtils.copyDirectoryStructure( getTestFile( "src/test/repository/groupId" ),
-                                          new File( repository, "groupId" ) );
-
-        Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    public void testMissingArtifact()
-        throws Exception
-    {
-        Artifact artifact = createArtifact( "foo", "bar", "XP" );
-
-        try
-        {
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-            fail( "Should not have passed" );
-        }
-        catch ( IllegalStateException e )
-        {
-            assertTrue( true );
-        }
-    }
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessorTest.java
deleted file mode 100644 (file)
index 93c7de6..0000000
+++ /dev/null
@@ -1,166 +0,0 @@
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Iterator;
-
-/**
- * This class tests the OldArtifactReportProcessor.
- */
-public class OldSnapshotArtifactReportProcessorTest
-    extends AbstractRepositoryReportsTestCase
-{
-    private ArtifactReportProcessor artifactReportProcessor;
-
-    private ReportingDatabase reportDatabase;
-
-    private File tempRepository;
-
-    public void setUp()
-        throws Exception
-    {
-        super.setUp();
-        artifactReportProcessor =
-            (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-snapshot-artifact" );
-
-        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
-        reportDatabase = new ReportingDatabase( reportGroup );
-        tempRepository = getTestFile( "target/test-repository" );
-        FileUtils.deleteDirectory( tempRepository );
-    }
-
-    public void testOldSnapshotArtifact()
-    {
-        Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-20050611.202024-1", "pom" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
-        Iterator artifactIterator = reportDatabase.getArtifactIterator();
-        assertArtifactResults( artifactIterator, artifact );
-    }
-
-    private static void assertArtifactResults( Iterator artifactIterator, Artifact artifact )
-    {
-        ArtifactResults results = (ArtifactResults) artifactIterator.next();
-        assertEquals( artifact.getArtifactId(), results.getArtifactId() );
-        assertEquals( artifact.getGroupId(), results.getGroupId() );
-        assertEquals( artifact.getVersion(), results.getVersion() );
-        assertFalse( artifact.getVersion().indexOf( "SNAPSHOT" ) >= 0 );
-        assertEquals( 1, results.getNotices().size() );
-        Iterator i = results.getNotices().iterator();
-        Result result = (Result) i.next();
-        assertEquals( "old-snapshot-artifact", result.getProcessor() );
-    }
-
-    public void testSNAPSHOTArtifact()
-    {
-        Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "pom" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    public void testNonSnapshotArtifact()
-    {
-        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    public void testNewSnapshotArtifact()
-        throws Exception
-    {
-        File repository = getTestFile( "target/test-repository" );
-
-        File dir = new File( repository, "groupId/artifactId/1.0-alpha-1-SNAPSHOT" );
-        dir.mkdirs();
-
-        String date = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).format( new Date() );
-        FileUtils.fileWrite( new File( dir, "artifactId-1.0-alpha-1-" + date + "-1.jar" ).getAbsolutePath(), "foo" );
-
-        Artifact artifact =
-            createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date + "-1" );
-
-        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
-    }
-
-    public void testTooManySnapshotArtifact()
-        throws Exception
-    {
-        File dir = new File( tempRepository, "groupId/artifactId/1.0-alpha-1-SNAPSHOT" );
-        dir.mkdirs();
-
-        String date = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).format( new Date() );
-        for ( int i = 1; i <= 5; i++ )
-        {
-            FileUtils.fileWrite( new File( dir, "artifactId-1.0-alpha-1-" + date + "-" + i + ".jar" ).getAbsolutePath(),
-                                 "foo" );
-        }
-
-        for ( int i = 1; i <= 5; i++ )
-        {
-            Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId",
-                                                              "1.0-alpha-1-" + date + "-" + i );
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-        }
-
-        assertEquals( 0, reportDatabase.getNumFailures() );
-        assertEquals( 0, reportDatabase.getNumWarnings() );
-        assertEquals( "Check notices", 3, reportDatabase.getNumNotices() );
-        Iterator artifactIterator = reportDatabase.getArtifactIterator();
-        for ( int i = 1; i <= 3; i++ )
-        {
-            String version = "1.0-alpha-1-" + date + "-" + i;
-            Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId", version );
-            assertArtifactResults( artifactIterator, artifact );
-        }
-    }
-
-    public void testMissingArtifact()
-        throws Exception
-    {
-        Artifact artifact = createArtifact( "foo", "bar", "XP" );
-
-        try
-        {
-            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
-            fail( "Should not have passed" );
-        }
-        catch ( IllegalStateException e )
-        {
-            assertTrue( true );
-        }
-    }
-}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessorTest.java
new file mode 100644 (file)
index 0000000..453d07b
--- /dev/null
@@ -0,0 +1,437 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Plugin;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+
+import java.util.Iterator;
+
+public class BadMetadataReportProcessorTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private ArtifactFactory artifactFactory;
+
+    private MetadataReportProcessor badMetadataReportProcessor;
+
+    private ReportingDatabase reportingDatabase;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+        badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
+        reportingDatabase = new ReportingDatabase( reportGroup );
+    }
+
+    public void testMetadataMissingLastUpdated()
+    {
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        Versioning versioning = new Versioning();
+        versioning.addVersion( "1.0-alpha-1" );
+        versioning.addVersion( "1.0-alpha-2" );
+
+        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertMetadata( metadata, results );
+        assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testMetadataMissingVersioning()
+    {
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, null );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        assertMetadata( metadata, results );
+        Result result = (Result) failures.next();
+        assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
+        result = (Result) failures.next();
+        boolean alpha1First = false;
+        if ( result.getReason().indexOf( "alpha-1" ) > 0 )
+        {
+            alpha1First = true;
+        }
+        if ( alpha1First )
+        {
+            assertEquals( "check reason",
+                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
+                          result.getReason() );
+        }
+        else
+        {
+            assertEquals( "check reason",
+                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
+                          result.getReason() );
+        }
+        result = (Result) failures.next();
+        if ( !alpha1First )
+        {
+            assertEquals( "check reason",
+                          "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
+                          result.getReason() );
+        }
+        else
+        {
+            assertEquals( "check reason",
+                          "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
+                          result.getReason() );
+        }
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testSnapshotMetadataMissingVersioning()
+    {
+        Artifact artifact =
+            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        assertMetadata( metadata, results );
+        Result result = (Result) failures.next();
+        assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testMetadataValidVersions()
+    {
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        Versioning versioning = new Versioning();
+        versioning.addVersion( "1.0-alpha-1" );
+        versioning.addVersion( "1.0-alpha-2" );
+        versioning.setLastUpdated( "20050611.202020" );
+
+        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertFalse( "check there are no failures", failures.hasNext() );
+    }
+
+    public void testMetadataMissingADirectory()
+    {
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        Versioning versioning = new Versioning();
+        versioning.addVersion( "1.0-alpha-1" );
+        versioning.setLastUpdated( "20050611.202020" );
+
+        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertMetadata( metadata, results );
+        // TODO: should be more robust
+        assertEquals( "check reason",
+                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
+                      result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testMetadataInvalidArtifactVersion()
+    {
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        Versioning versioning = new Versioning();
+        versioning.addVersion( "1.0-alpha-1" );
+        versioning.addVersion( "1.0-alpha-2" );
+        versioning.addVersion( "1.0-alpha-3" );
+        versioning.setLastUpdated( "20050611.202020" );
+
+        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertMetadata( metadata, results );
+        // TODO: should be more robust
+        assertEquals( "check reason",
+                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
+                      result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testMoreThanOneMetadataVersionErrors()
+    {
+        Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        Versioning versioning = new Versioning();
+        versioning.addVersion( "1.0-alpha-1" );
+        versioning.addVersion( "1.0-alpha-3" );
+        versioning.setLastUpdated( "20050611.202020" );
+
+        RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertMetadata( metadata, results );
+        // TODO: should be more robust
+        assertEquals( "check reason",
+                      "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
+                      result.getReason() );
+        assertTrue( "check there is a 2nd failure", failures.hasNext() );
+        result = (Result) failures.next();
+        // TODO: should be more robust
+        assertEquals( "check reason",
+                      "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
+                      result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testValidPluginMetadata()
+    {
+        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertFalse( "check there are no failures", failures.hasNext() );
+    }
+
+    public void testMissingMetadataPlugin()
+    {
+        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "missing-plugin", "default3" ) );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        // TODO: should be more robust
+        assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository",
+                      result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testIncompletePluginMetadata()
+    {
+        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        // TODO: should be more robust
+        assertEquals( "check reason",
+                      "Plugin snapshot-artifact is present in the repository but " + "missing in the metadata.",
+                      result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testInvalidPluginArtifactId()
+    {
+        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( null, "default3" ) );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "", "default4" ) );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        // TODO: should be more robust
+        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3",
+                      result.getReason() );
+        assertTrue( "check there is a 2nd failure", failures.hasNext() );
+        result = (Result) failures.next();
+        // TODO: should be more robust
+        assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4",
+                      result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testInvalidPluginPrefix()
+    {
+        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "" ) );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        // TODO: should be more robust
+        assertEquals( "check reason", "Missing or empty plugin prefix for artifactId artifactId.", result.getReason() );
+        assertTrue( "check there is a 2nd failure", failures.hasNext() );
+        result = (Result) failures.next();
+        // TODO: should be more robust
+        assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.",
+                      result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testDuplicatePluginPrefixes()
+    {
+        RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+        metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default" ) );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        // TODO: should be more robust
+        assertEquals( "check reason", "Duplicate plugin prefix found: default.", result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    public void testValidSnapshotMetadata()
+    {
+        Artifact artifact =
+            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+
+        Snapshot snapshot = new Snapshot();
+        snapshot.setBuildNumber( 1 );
+        snapshot.setTimestamp( "20050611.202024" );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertFalse( "check there are no failures", failures.hasNext() );
+    }
+
+    public void testInvalidSnapshotMetadata()
+    {
+        Artifact artifact =
+            artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+
+        Snapshot snapshot = new Snapshot();
+        snapshot.setBuildNumber( 2 );
+        snapshot.setTimestamp( "20050611.202024" );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
+
+        badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertMetadata( metadata, results );
+        // TODO: should be more robust
+        assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.",
+                      result.getReason() );
+        assertFalse( "check no more failures", failures.hasNext() );
+    }
+
+    private static void assertMetadata( RepositoryMetadata metadata, MetadataResults results )
+    {
+        assertEquals( "check metadata", metadata.getGroupId(), results.getGroupId() );
+        assertEquals( "check metadata", metadata.getArtifactId(), results.getArtifactId() );
+        assertEquals( "check metadata", metadata.getBaseVersion(), results.getVersion() );
+    }
+
+    private Plugin createMetadataPlugin( String artifactId, String prefix )
+    {
+        Plugin plugin = new Plugin();
+        plugin.setArtifactId( artifactId );
+        plugin.setName( artifactId );
+        plugin.setPrefix( prefix );
+        return plugin;
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessorTest.java
new file mode 100644 (file)
index 0000000..4800e22
--- /dev/null
@@ -0,0 +1,307 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.processor.DependencyArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.model.Dependency;
+import org.apache.maven.model.Model;
+
+import java.util.Iterator;
+
+/**
+ *
+ */
+public class DependencyArtifactReportProcessorTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private static final String VALID_GROUP_ID = "groupId";
+
+    private static final String VALID_ARTIFACT_ID = "artifactId";
+
+    private static final String VALID_VERSION = "1.0-alpha-1";
+
+    private ReportingDatabase reportingDatabase;
+
+    private Model model;
+
+    private ArtifactReportProcessor processor;
+
+    private ArtifactFactory artifactFactory;
+
+    private static final String INVALID = "invalid";
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+        model = new Model();
+        processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
+
+        artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
+        reportingDatabase = new ReportingDatabase( reportGroup );
+    }
+
+    public void testArtifactFoundButNoDirectDependencies()
+    {
+        Artifact artifact = createValidArtifact();
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 0, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+    }
+
+    private Artifact createValidArtifact()
+    {
+        Artifact projectArtifact =
+            artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
+        projectArtifact.setRepository( repository );
+        return projectArtifact;
+    }
+
+    public void testArtifactNotFound()
+    {
+        Artifact artifact = artifactFactory.createProjectArtifact( INVALID, INVALID, INVALID );
+        artifact.setRepository( repository );
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        ArtifactResults results = (ArtifactResults) failures.next();
+        assertFalse( failures.hasNext() );
+        failures = results.getFailures().iterator();
+        Result result = (Result) failures.next();
+        assertEquals( "Artifact does not exist in the repository", result.getReason() );
+    }
+
+    public void testValidArtifactWithNullDependency()
+    {
+        Artifact artifact = createValidArtifact();
+
+        Dependency dependency = createValidDependency();
+        model.addDependency( dependency );
+
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 0, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+    }
+
+    private Dependency createValidDependency()
+    {
+        return createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
+    }
+
+    public void testValidArtifactWithValidSingleDependency()
+    {
+        Artifact artifact = createValidArtifact();
+
+        Dependency dependency = createValidDependency();
+        model.addDependency( dependency );
+
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 0, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+    }
+
+    public void testValidArtifactWithValidMultipleDependencies()
+    {
+        Dependency dependency = createValidDependency();
+        model.addDependency( dependency );
+        model.addDependency( dependency );
+        model.addDependency( dependency );
+        model.addDependency( dependency );
+        model.addDependency( dependency );
+
+        Artifact artifact = createValidArtifact();
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 0, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+    }
+
+    public void testValidArtifactWithAnInvalidDependency()
+    {
+        Dependency dependency = createValidDependency();
+        model.addDependency( dependency );
+        model.addDependency( dependency );
+        model.addDependency( dependency );
+        model.addDependency( dependency );
+        model.addDependency( createDependency( INVALID, INVALID, INVALID ) );
+
+        Artifact artifact = createValidArtifact();
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        ArtifactResults results = (ArtifactResults) failures.next();
+        assertFalse( failures.hasNext() );
+        failures = results.getFailures().iterator();
+        Result result = (Result) failures.next();
+        assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ),
+                      result.getReason() );
+    }
+
+    public void testValidArtifactWithInvalidDependencyGroupId()
+    {
+        Artifact artifact = createValidArtifact();
+
+        Dependency dependency = createDependency( INVALID, VALID_ARTIFACT_ID, VALID_VERSION );
+        model.addDependency( dependency );
+
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        ArtifactResults results = (ArtifactResults) failures.next();
+        assertFalse( failures.hasNext() );
+        failures = results.getFailures().iterator();
+        Result result = (Result) failures.next();
+        assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
+    }
+
+    private Dependency createDependency( String o, String valid, String s )
+    {
+        Dependency dependency = new Dependency();
+        dependency.setGroupId( o );
+        dependency.setArtifactId( valid );
+        dependency.setVersion( s );
+        return dependency;
+    }
+
+    public void testValidArtifactWithInvalidDependencyArtifactId()
+    {
+        Artifact artifact = createValidArtifact();
+
+        Dependency dependency = createDependency( VALID_GROUP_ID, INVALID, VALID_VERSION );
+        model.addDependency( dependency );
+
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        ArtifactResults results = (ArtifactResults) failures.next();
+        assertFalse( failures.hasNext() );
+        failures = results.getFailures().iterator();
+        Result result = (Result) failures.next();
+        assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
+    }
+
+    public void testValidArtifactWithIncorrectDependencyVersion()
+    {
+        Artifact artifact = createValidArtifact();
+
+        Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, INVALID );
+        model.addDependency( dependency );
+
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        ArtifactResults results = (ArtifactResults) failures.next();
+        assertFalse( failures.hasNext() );
+        failures = results.getFailures().iterator();
+        Result result = (Result) failures.next();
+        assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
+    }
+
+    public void testValidArtifactWithInvalidDependencyVersion()
+    {
+        Artifact artifact = createValidArtifact();
+
+        Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[" );
+        model.addDependency( dependency );
+
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        ArtifactResults results = (ArtifactResults) failures.next();
+        assertFalse( failures.hasNext() );
+        failures = results.getFailures().iterator();
+        Result result = (Result) failures.next();
+        assertEquals( getDependencyVersionInvalidMessage( dependency, "[" ), result.getReason() );
+    }
+
+    public void testValidArtifactWithInvalidDependencyVersionRange()
+    {
+        Artifact artifact = createValidArtifact();
+
+        Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[1.0,)" );
+        model.addDependency( dependency );
+
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 0, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+    }
+
+    public void testValidArtifactWithMissingDependencyVersion()
+    {
+        Artifact artifact = createValidArtifact();
+
+        Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, null );
+        model.addDependency( dependency );
+
+        processor.processArtifact( artifact, model, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        ArtifactResults results = (ArtifactResults) failures.next();
+        assertFalse( failures.hasNext() );
+        failures = results.getFailures().iterator();
+        Result result = (Result) failures.next();
+        assertEquals( getDependencyVersionInvalidMessage( dependency, null ), result.getReason() );
+    }
+
+    private String getDependencyVersionInvalidMessage( Dependency dependency, String version )
+    {
+        return "Artifact's dependency " + getDependencyString( dependency ) + " contains an invalid version " + version;
+    }
+
+    private static String getDependencyString( Dependency dependency )
+    {
+        return DependencyArtifactReportProcessor.getDependencyString( dependency );
+    }
+
+    private String getDependencyNotFoundMessage( Dependency dependency )
+    {
+        return "Artifact's dependency " + getDependencyString( dependency ) + " does not exist in the repository";
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.java
new file mode 100644 (file)
index 0000000..5f8e39d
--- /dev/null
@@ -0,0 +1,149 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.model.Model;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.Collections;
+
+/**
+ * @author Edwin Punzalan
+ */
+public class DuplicateArtifactFileReportProcessorTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private Artifact artifact;
+
+    private Model model;
+
+    private ArtifactReportProcessor processor;
+
+    private ArtifactFactory artifactFactory;
+
+    File indexDirectory;
+
+    private ReportingDatabase reportDatabase;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        indexDirectory = getTestFile( "target/indexDirectory" );
+        FileUtils.deleteDirectory( indexDirectory );
+
+        artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+        artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "jar" );
+        System.out.println( "artifact = " + artifact );
+        model = new Model();
+
+        RepositoryArtifactIndexFactory factory =
+            (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+        RepositoryArtifactIndex index = factory.createStandardIndex( indexDirectory );
+
+        RepositoryIndexRecordFactory recordFactory =
+            (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
+
+        index.indexRecords( Collections.singletonList( recordFactory.createRecord( artifact ) ) );
+
+        processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "duplicate" );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
+        reportDatabase = new ReportingDatabase( reportGroup );
+    }
+
+    public void testNullArtifactFile()
+        throws Exception
+    {
+        artifact.setFile( null );
+
+        processor.processArtifact( artifact, model, reportDatabase );
+
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        assertEquals( "Check warnings", 1, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+    }
+
+    public void testSuccessOnAlreadyIndexedArtifact()
+        throws Exception
+    {
+        processor.processArtifact( artifact, model, reportDatabase );
+
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+    }
+
+    public void testSuccessOnDifferentGroupId()
+        throws Exception
+    {
+        artifact.setGroupId( "different.groupId" );
+        processor.processArtifact( artifact, model, reportDatabase );
+
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+    }
+
+    public void testSuccessOnNewArtifact()
+        throws Exception
+    {
+        Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
+
+        processor.processArtifact( newArtifact, model, reportDatabase );
+
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+    }
+
+    public void testFailure()
+        throws Exception
+    {
+        Artifact duplicate = createArtifact( artifact.getGroupId(), "snapshot-artifact", "1.0-alpha-1-SNAPSHOT",
+                                             artifact.getVersion(), artifact.getType() );
+        duplicate.setFile( artifact.getFile() );
+
+        processor.processArtifact( duplicate, model, reportDatabase );
+
+        assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+        assertEquals( "Check no failures", 1, reportDatabase.getNumFailures() );
+    }
+
+    private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
+                                     String type )
+    {
+        Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
+        artifact.setBaseVersion( baseVersion );
+        artifact.setRepository( repository );
+        artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+        return artifact;
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessorTest.java
new file mode 100644 (file)
index 0000000..07ac2a0
--- /dev/null
@@ -0,0 +1,83 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+
+/**
+ * This class tests the InvalidPomArtifactReportProcessor class.
+ */
+public class InvalidPomArtifactReportProcessorTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private ArtifactReportProcessor artifactReportProcessor;
+
+    private ReportingDatabase reportDatabase;
+
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "invalid-pom" );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
+        reportDatabase = new ReportingDatabase( reportGroup );
+    }
+
+    /**
+     * Test the InvalidPomArtifactReportProcessor when the artifact is an invalid pom.
+     */
+    public void testInvalidPomArtifactReportProcessorFailure()
+    {
+        Artifact artifact = createArtifact( "org.apache.maven", "artifactId", "1.0-alpha-3", "pom" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 1, reportDatabase.getNumFailures() );
+    }
+
+
+    /**
+     * Test the InvalidPomArtifactReportProcessor when the artifact is a valid pom.
+     */
+    public void testInvalidPomArtifactReportProcessorSuccess()
+    {
+        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+
+    /**
+     * Test the InvalidPomArtifactReportProcessor when the artifact is not a pom.
+     */
+    public void testNotAPomArtifactReportProcessorSuccess()
+    {
+        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "jar" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessorTest.java
new file mode 100644 (file)
index 0000000..9d3e997
--- /dev/null
@@ -0,0 +1,228 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+
+/**
+ * This class tests the LocationArtifactReportProcessor.
+ */
+public class LocationArtifactReportProcessorTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private ArtifactReportProcessor artifactReportProcessor;
+
+    private ReportingDatabase reportDatabase;
+
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "artifact-location" );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
+        reportDatabase = new ReportingDatabase( reportGroup );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact's physical location matches the location specified
+     * both in the file system pom and in the pom included in the package.
+     */
+    public void testPackagedPomLocationArtifactReporterSuccess()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact is in the location specified in the
+     * file system pom (but the jar file does not have a pom included in its package).
+     */
+    public void testLocationArtifactReporterSuccess()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
+        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
+
+        Model model = readPom( repository.pathOf( pomArtifact ) );
+        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact is in the location specified in the
+     * file system pom, but the pom itself is passed in.
+     */
+    public void testLocationArtifactReporterSuccessPom()
+        throws IOException, XmlPullParserException
+    {
+        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
+
+        Model model = readPom( repository.pathOf( pomArtifact ) );
+        artifactReportProcessor.processArtifact( pomArtifact, model, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact is in the location specified in the
+     * file system pom, with a classifier.
+     */
+    public void testLocationArtifactReporterSuccessClassifier()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "java-source" );
+        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
+
+        Model model = readPom( repository.pathOf( pomArtifact ) );
+        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact is in the location specified in the
+     * file system pom, with a classifier.
+     */
+    public void testLocationArtifactReporterSuccessZip()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact =
+            createArtifactWithClassifier( "groupId", "artifactId", "1.0-alpha-1", "distribution-zip", "src" );
+        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
+
+        Model model = readPom( repository.pathOf( pomArtifact ) );
+        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact is in the location specified in the
+     * file system pom, with a classifier.
+     */
+    public void testLocationArtifactReporterSuccessTgz()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact =
+            createArtifactWithClassifier( "groupId", "artifactId", "1.0-alpha-1", "distribution-tgz", "src" );
+        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
+
+        Model model = readPom( repository.pathOf( pomArtifact ) );
+        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact is not in the location specified
+     * in the file system pom.
+     */
+    public void testLocationArtifactReporterFailure()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2" );
+        Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
+
+        Model model = readPom( repository.pathOf( pomArtifact ) );
+        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+        
+        assertEquals( 1, reportDatabase.getNumFailures() );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact's physical location does not match the
+     * location in the file system pom but instead matches the specified location in the packaged pom.
+     */
+    public void testFsPomArtifactMatchFailure()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0" );
+
+        Artifact pomArtifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0", "pom" );
+        Model model = readPom( repository.pathOf( pomArtifact ) );
+        artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+        assertEquals( 1, reportDatabase.getNumFailures() );
+    }
+
+    private Model readPom( String path )
+        throws IOException, XmlPullParserException
+    {
+        Reader reader = new FileReader( new File( repository.getBasedir(), path ) );
+        Model model = new MavenXpp3Reader().read( reader );
+        // hokey inheritence to avoid some errors right now
+        if ( model.getGroupId() == null )
+        {
+            model.setGroupId( model.getParent().getGroupId() );
+        }
+        if ( model.getVersion() == null )
+        {
+            model.setVersion( model.getParent().getVersion() );
+        }
+        return model;
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact's physical location does not match the
+     * location specified in the packaged pom but matches the location specified in the file system pom.
+     */
+    public void testPkgPomArtifactMatchFailure()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact = createArtifact( "org.apache.maven", "maven-monitor", "2.1" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 1, reportDatabase.getNumFailures() );
+    }
+
+    /**
+     * Test the LocationArtifactReporter when the artifact's physical location does not match both the
+     * location specified in the packaged pom and the location specified in the file system pom.
+     */
+    public void testBothPomArtifactMatchFailure()
+        throws IOException, XmlPullParserException
+    {
+        Artifact artifact = createArtifact( "org.apache.maven", "maven-project", "2.1" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 1, reportDatabase.getNumFailures() );
+    }
+
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.java
new file mode 100644 (file)
index 0000000..95d3c90
--- /dev/null
@@ -0,0 +1,100 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.Iterator;
+
+/**
+ * This class tests the OldArtifactReportProcessor: an aged artifact yields a notice,
+ * a freshly written artifact yields none, and a missing artifact file is an error.
+ */
+public class OldArtifactReportProcessorTest
+    extends AbstractRepositoryReportsTestCase
+{
+    /** Processor under test; Plexus component with role-hint "old-artifact". */
+    private ArtifactReportProcessor artifactReportProcessor;
+
+    /** Fresh per-test database into which failures/warnings/notices are recorded. */
+    private ReportingDatabase reportDatabase;
+
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-artifact" );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
+        reportDatabase = new ReportingDatabase( reportGroup );
+    }
+
+    /**
+     * An artifact already present in the checked-in test repository is expected to be
+     * flagged as old (presumably by file age — confirm against OldArtifactReportProcessor):
+     * exactly one notice from the "old-artifact" processor, no failures or warnings.
+     */
+    public void testOldArtifact()
+    {
+        Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
+        // The notice must be recorded against the same coordinates that were processed.
+        ArtifactResults results = (ArtifactResults) reportDatabase.getArtifactIterator().next();
+        assertEquals( artifact.getArtifactId(), results.getArtifactId() );
+        assertEquals( artifact.getGroupId(), results.getGroupId() );
+        assertEquals( artifact.getVersion(), results.getVersion() );
+        assertEquals( 1, results.getNotices().size() );
+        Iterator i = results.getNotices().iterator();
+        Result result = (Result) i.next();
+        assertEquals( "old-artifact", result.getProcessor() );
+    }
+
+    /**
+     * An artifact copied into a scratch repository just now must not be flagged.
+     */
+    public void testNewArtifact()
+        throws Exception
+    {
+        // Copying the fixture gives the files a current timestamp, so they read as "new".
+        File repository = getTestFile( "target/test-repository" );
+
+        FileUtils.copyDirectoryStructure( getTestFile( "src/test/repository/groupId" ),
+                                          new File( repository, "groupId" ) );
+
+        Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * Processing an artifact whose file does not exist is treated as a programming
+     * error: the processor is expected to throw IllegalStateException.
+     */
+    public void testMissingArtifact()
+        throws Exception
+    {
+        Artifact artifact = createArtifact( "foo", "bar", "XP" );
+
+        try
+        {
+            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+            fail( "Should not have passed" );
+        }
+        catch ( IllegalStateException e )
+        {
+            assertTrue( true );
+        }
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.java
new file mode 100644 (file)
index 0000000..55a5be3
--- /dev/null
@@ -0,0 +1,170 @@
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Iterator;
+
+/**
+ * This class tests the OldSnapshotArtifactReportProcessor: aged timestamped snapshot
+ * builds yield notices; literal -SNAPSHOT versions, non-snapshots and current builds do not.
+ */
+public class OldSnapshotArtifactReportProcessorTest
+    extends AbstractRepositoryReportsTestCase
+{
+    /** Processor under test; Plexus role-hint "old-snapshot-artifact". */
+    private ArtifactReportProcessor artifactReportProcessor;
+
+    /** Fresh per-test database into which failures/warnings/notices are recorded. */
+    private ReportingDatabase reportDatabase;
+
+    /** Scratch repository under target/, wiped in setUp() so each test starts clean. */
+    private File tempRepository;
+
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+        artifactReportProcessor =
+            (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-snapshot-artifact" );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
+        reportDatabase = new ReportingDatabase( reportGroup );
+        tempRepository = getTestFile( "target/test-repository" );
+        FileUtils.deleteDirectory( tempRepository );
+    }
+
+    /**
+     * A timestamped snapshot build from 2005 is expected to produce exactly one
+     * notice from the "old-snapshot-artifact" processor, with no failures or warnings.
+     */
+    public void testOldSnapshotArtifact()
+    {
+        Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-20050611.202024-1", "pom" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
+        Iterator artifactIterator = reportDatabase.getArtifactIterator();
+        assertArtifactResults( artifactIterator, artifact );
+    }
+
+    /**
+     * Asserts that the next ArtifactResults in the iterator carries the given artifact's
+     * coordinates and exactly one notice attributed to the "old-snapshot-artifact" processor.
+     */
+    private static void assertArtifactResults( Iterator artifactIterator, Artifact artifact )
+    {
+        ArtifactResults results = (ArtifactResults) artifactIterator.next();
+        assertEquals( artifact.getArtifactId(), results.getArtifactId() );
+        assertEquals( artifact.getGroupId(), results.getGroupId() );
+        assertEquals( artifact.getVersion(), results.getVersion() );
+        // Sanity check: only timestamped builds (never literal -SNAPSHOT) should reach here.
+        assertFalse( artifact.getVersion().indexOf( "SNAPSHOT" ) >= 0 );
+        assertEquals( 1, results.getNotices().size() );
+        Iterator i = results.getNotices().iterator();
+        Result result = (Result) i.next();
+        assertEquals( "old-snapshot-artifact", result.getProcessor() );
+    }
+
+    /**
+     * A literal "-SNAPSHOT" (non-timestamped) version must not be flagged.
+     */
+    public void testSNAPSHOTArtifact()
+    {
+        Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "pom" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * A release (non-snapshot) version is outside this processor's scope; no notices.
+     */
+    public void testNonSnapshotArtifact()
+    {
+        Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * A timestamped snapshot build written just now (timestamp = current time)
+     * must not be flagged as old.
+     */
+    public void testNewSnapshotArtifact()
+        throws Exception
+    {
+        File repository = getTestFile( "target/test-repository" );
+
+        File dir = new File( repository, "groupId/artifactId/1.0-alpha-1-SNAPSHOT" );
+        dir.mkdirs();
+
+        // Build a snapshot version string stamped with the current wall-clock time.
+        String date = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).format( new Date() );
+        FileUtils.fileWrite( new File( dir, "artifactId-1.0-alpha-1-" + date + "-1.jar" ).getAbsolutePath(), "foo" );
+
+        Artifact artifact =
+            createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date + "-1" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+    }
+
+    /**
+     * Five builds of the same snapshot (same timestamp, build numbers 1-5) are processed;
+     * the three oldest (build numbers 1-3) are expected to be flagged, i.e. the processor
+     * apparently retains the newest two builds — confirm against its configuration.
+     */
+    public void testTooManySnapshotArtifact()
+        throws Exception
+    {
+        File dir = new File( tempRepository, "groupId/artifactId/1.0-alpha-1-SNAPSHOT" );
+        dir.mkdirs();
+
+        String date = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).format( new Date() );
+        for ( int i = 1; i <= 5; i++ )
+        {
+            FileUtils.fileWrite( new File( dir, "artifactId-1.0-alpha-1-" + date + "-" + i + ".jar" ).getAbsolutePath(),
+                                 "foo" );
+        }
+
+        for ( int i = 1; i <= 5; i++ )
+        {
+            Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId",
+                                                              "1.0-alpha-1-" + date + "-" + i );
+            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+        }
+
+        assertEquals( 0, reportDatabase.getNumFailures() );
+        assertEquals( 0, reportDatabase.getNumWarnings() );
+        assertEquals( "Check notices", 3, reportDatabase.getNumNotices() );
+        Iterator artifactIterator = reportDatabase.getArtifactIterator();
+        for ( int i = 1; i <= 3; i++ )
+        {
+            String version = "1.0-alpha-1-" + date + "-" + i;
+            Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId", version );
+            assertArtifactResults( artifactIterator, artifact );
+        }
+    }
+
+    /**
+     * Processing an artifact whose file does not exist is treated as a programming
+     * error: the processor is expected to throw IllegalStateException.
+     */
+    public void testMissingArtifact()
+        throws Exception
+    {
+        Artifact artifact = createArtifact( "foo", "bar", "XP" );
+
+        try
+        {
+            artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+            fail( "Should not have passed" );
+        }
+        catch ( IllegalStateException e )
+        {
+            assertTrue( true );
+        }
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/AbstractChecksumArtifactReporterTestCase.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/AbstractChecksumArtifactReporterTestCase.java
new file mode 100644 (file)
index 0000000..4974d8b
--- /dev/null
@@ -0,0 +1,284 @@
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.digest.Digester;
+import org.codehaus.plexus.digest.DigesterException;
+import org.codehaus.plexus.util.FileUtils;
+import org.codehaus.plexus.util.IOUtil;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+
+import java.io.BufferedOutputStream;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.util.jar.JarEntry;
+import java.util.jar.JarOutputStream;
+
+/**
+ * This class creates the artifact and metadata files used for testing the ChecksumArtifactReportProcessor.
+ * It is extended by ChecksumArtifactReporterTest class.
+ *
+ * Fixtures are written under the test repository's "checksumTest/" tree; "valid" fixtures
+ * get checksum files that match the payload, "invalid" ones get deliberately corrupted sums.
+ */
+public abstract class AbstractChecksumArtifactReporterTestCase
+    extends AbstractRepositoryReportsTestCase
+{
+    // Artifact base names ("artifactId-version") whose checksum files are written correctly.
+    // The '-' is later turned into '/' to derive the directory path.
+    private static final String[] validArtifactChecksumJars = {"validArtifact-1.0"};
+
+    // Artifact base names whose checksum files are deliberately corrupted.
+    private static final String[] invalidArtifactChecksumJars = {"invalidArtifact-1.0"};
+
+    // Base filename (without extension) of repository metadata files.
+    private static final String metadataChecksumFilename = "maven-metadata";
+
+    // Digesters looked up from the Plexus container in setUp().
+    private Digester sha1Digest;
+
+    private Digester md5Digest;
+
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        sha1Digest = (Digester) lookup( Digester.ROLE, "sha1" );
+        md5Digest = (Digester) lookup( Digester.ROLE, "md5" );
+    }
+
+    /**
+     * Create checksum files.
+     *
+     * @param type The type of checksum file to be created: "VALID" for matching
+     *             checksums, "INVALID" for corrupted ones; anything else is a no-op.
+     */
+    protected void createChecksumFile( String type )
+        throws DigesterException, IOException
+    {
+        //loop through the valid artifact names..
+        if ( "VALID".equals( type ) )
+        {
+            for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
+            {
+                writeChecksumFile( "checksumTest/", validArtifactChecksumJars[i], "jar", true );
+            }
+        }
+        else if ( "INVALID".equals( type ) )
+        {
+            for ( int i = 0; i < invalidArtifactChecksumJars.length; i++ )
+            {
+                writeChecksumFile( "checksumTest/", invalidArtifactChecksumJars[i], "jar", false );
+            }
+        }
+    }
+
+    /**
+     * Create checksum files for metadata.
+     *
+     * Valid metadata is written at version, artifact and group level; invalid
+     * metadata only at version level.
+     *
+     * @param type The type of checksum to be created. (Valid or invalid)
+     */
+    protected void createMetadataFile( String type )
+        throws DigesterException, IOException
+    {
+        //loop through the valid artifact names..
+        if ( "VALID".equals( type ) )
+        {
+            writeMetadataFile( "checksumTest/validArtifact/1.0/", metadataChecksumFilename, "xml", true );
+            writeMetadataFile( "checksumTest/validArtifact/", metadataChecksumFilename, "xml", true );
+            writeMetadataFile( "checksumTest/", metadataChecksumFilename, "xml", true );
+        }
+        else if ( "INVALID".equals( type ) )
+        {
+            writeMetadataFile( "checksumTest/invalidArtifact/1.0/", metadataChecksumFilename, "xml", false );
+        }
+    }
+
+    /**
+     * Create artifact together with its checksums.
+     *
+     * NOTE(review): everything is guarded by mkdirs(); if the directory already exists
+     * (mkdirs() returns false) nothing is written, silently. Streams/writers here are
+     * not closed on exception paths and use the platform default charset.
+     *
+     * @param relativePath The groupId
+     * @param filename     The filename of the artifact to be created.
+     * @param type         The file type (JAR)
+     * @param isValid      Indicates whether the checksum to be created is valid or not.
+     */
+    private void writeChecksumFile( String relativePath, String filename, String type, boolean isValid )
+        throws IOException, DigesterException
+    {
+        //Initialize variables for creating jar files
+        // NOTE(review): paths are built by plain concatenation; assumes getBasedir()
+        // ends with a separator — confirm against the base test case.
+        String repoUrl = repository.getBasedir();
+
+        // "artifactId-version" becomes "artifactId/version" to form the directory layout.
+        String dirs = filename.replace( '-', '/' );
+        //create the group level directory of the artifact
+        File dirFiles = new File( repoUrl + relativePath + dirs );
+
+        if ( dirFiles.mkdirs() )
+        {
+            // create a jar file
+            String path = repoUrl + relativePath + dirs + "/" + filename + "." + type;
+            FileOutputStream f = new FileOutputStream( path );
+            JarOutputStream out = new JarOutputStream( new BufferedOutputStream( f ) );
+
+            // jar sample.txt
+            String filename1 = repoUrl + relativePath + dirs + "/sample.txt";
+            createSampleFile( filename1 );
+
+            BufferedReader in = new BufferedReader( new FileReader( filename1 ) );
+            out.putNextEntry( new JarEntry( filename1 ) );
+            IOUtil.copy( in, out );
+            in.close();
+            out.close();
+
+            //Create md5 and sha-1 checksum files..
+
+            File file = new File( path + ".md5" );
+            OutputStream os = new FileOutputStream( file );
+            OutputStreamWriter osw = new OutputStreamWriter( os );
+            String sum = md5Digest.calc( new File( path ) );
+            if ( !isValid )
+            {
+                // Appending a character corrupts the recorded checksum on purpose.
+                osw.write( sum + "1" );
+            }
+            else
+            {
+                osw.write( sum );
+            }
+            osw.close();
+
+            file = new File( path + ".sha1" );
+            os = new FileOutputStream( file );
+            osw = new OutputStreamWriter( os );
+            String sha1sum = sha1Digest.calc( new File( path ) );
+            if ( !isValid )
+            {
+                osw.write( sha1sum + "2" );
+            }
+            else
+            {
+                osw.write( sha1sum );
+            }
+            osw.close();
+        }
+    }
+
+    /**
+     * Create metadata file together with its checksums.
+     *
+     * Copies the template metadata file from the repository root into the target
+     * path, then writes its .md5/.sha1 files (corrupted when isValid is false).
+     *
+     * @param relativePath The groupId
+     * @param filename     The filename of the artifact to be created.
+     * @param type         The file type (JAR)
+     * @param isValid      Indicates whether the checksum to be created is valid or not.
+     */
+    private void writeMetadataFile( String relativePath, String filename, String type, boolean isValid )
+        throws IOException, DigesterException
+    {
+        //create checksum for the metadata file..
+        String repoUrl = repository.getBasedir();
+        String url = repository.getBasedir() + "/" + filename + "." + type;
+
+        String path = repoUrl + relativePath + filename + "." + type;
+        FileUtils.copyFile( new File( url ), new File( path ) );
+
+        //Create md5 and sha-1 checksum files..
+        File file = new File( path + ".md5" );
+        OutputStream os = new FileOutputStream( file );
+        OutputStreamWriter osw = new OutputStreamWriter( os );
+        String md5sum = md5Digest.calc( new File( path ) );
+        if ( !isValid )
+        {
+            // Appended character makes the recorded md5 deliberately wrong.
+            osw.write( md5sum + "1" );
+        }
+        else
+        {
+            osw.write( md5sum );
+        }
+        osw.close();
+
+        file = new File( path + ".sha1" );
+        os = new FileOutputStream( file );
+        osw = new OutputStreamWriter( os );
+        String sha1sum = sha1Digest.calc( new File( path ) );
+        if ( !isValid )
+        {
+            osw.write( sha1sum + "2" );
+        }
+        else
+        {
+            osw.write( sha1sum );
+        }
+        osw.close();
+    }
+
+    /**
+     * Create the sample file that will be included in the jar.
+     *
+     * @param filename absolute path of the file to write
+     */
+    private void createSampleFile( String filename )
+        throws IOException
+    {
+        File file = new File( filename );
+        OutputStream os = new FileOutputStream( file );
+        OutputStreamWriter osw = new OutputStreamWriter( os );
+        osw.write( "This is the content of the sample file that will be included in the jar file." );
+        osw.close();
+    }
+
+    /**
+     * Delete the test directory created in the repository.
+     *
+     * @param dir The directory to be deleted.
+     */
+    protected void deleteTestDirectory( File dir )
+    {
+        try
+        {
+            FileUtils.deleteDirectory( dir );
+        }
+        catch ( IOException e )
+        {
+            // ignore - best-effort cleanup only
+        }
+    }
+
+    // Best-effort single-file delete; the return value of delete() is ignored.
+    private void deleteFile( String filename )
+    {
+        File f = new File( filename );
+        f.delete();
+    }
+
+    /**
+     * Delete the checksum files written for the valid artifacts and for the
+     * version-level metadata under each valid artifact's directory.
+     *
+     * @param type the artifact file extension whose .md5/.sha1 files are removed (e.g. "jar")
+     */
+    protected void deleteChecksumFiles( String type )
+    {
+        //delete valid checksum files of artifacts created
+        for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
+        {
+            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
+                "/" + validArtifactChecksumJars[i] + "." + type + ".md5" );
+
+            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
+                "/" + validArtifactChecksumJars[i] + "." + type + ".sha1" );
+        }
+
+        //delete valid checksum files of metadata file
+        for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
+        {
+            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
+                "/" + metadataChecksumFilename + ".xml.md5" );
+
+            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
+                "/" + metadataChecksumFilename + ".xml.sha1" );
+        }
+    }
+
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumArtifactReporterTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/ChecksumArtifactReporterTest.java
new file mode 100644 (file)
index 0000000..5a9ac74
--- /dev/null
@@ -0,0 +1,161 @@
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+import org.apache.maven.archiva.reporting.reporter.AbstractChecksumArtifactReporterTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.codehaus.plexus.digest.DigesterException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+
<br>

+/**
+ * This class tests the ChecksumArtifactReportProcessor.
+ * It extends the AbstractChecksumArtifactReporterTestCase class, which builds the
+ * checksum fixtures under "checksumTest/" in the test repository.
+ */
+public class ChecksumArtifactReporterTest
+    extends AbstractChecksumArtifactReporterTestCase
+{
+    /** Artifact checksum processor under test (role-hint "checksum"). */
+    private ArtifactReportProcessor artifactReportProcessor;
+
+    /** Per-test database collecting failures, warnings and notices. */
+    private ReportingDatabase reportingDatabase;
+
+    /** Metadata checksum processor under test (role-hint "checksum-metadata"). */
+    private MetadataReportProcessor metadataReportProcessor;
+
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
+        metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
+        reportingDatabase = new ReportingDatabase( reportGroup );
+    }
+
+    /**
+     * Test the ChecksumArtifactReportProcessor when the checksum files are valid.
+     * Both VALID and INVALID fixtures are created, but only the valid artifact is
+     * processed here; it must produce no failures, warnings or notices.
+     */
+    public void testChecksumArtifactReporterSuccess()
+        throws DigesterException, IOException
+    {
+        createChecksumFile( "VALID" );
+        createChecksumFile( "INVALID" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
+        assertEquals( 0, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+    }
+
+    /**
+     * Test the ChecksumArtifactReportProcessor when the checksum files are invalid:
+     * exactly one failure is expected, with no warnings or notices.
+     * Relies on the INVALID fixtures created by an earlier test in this fixture tree —
+     * NOTE(review): ordering-dependent; confirm the fixture lifecycle.
+     */
+    public void testChecksumArtifactReporterFailed()
+    {
+        String s = "invalidArtifact";
+        String s1 = "1.0";
+        Artifact artifact = createArtifact( "checksumTest", s, s1 );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+        assertEquals( 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+    }
+
+    /**
+     * Test the valid checksum of a metadata file at version, artifact and group level.
+     * NOTE(review): this test makes no assertions — it only verifies that processing
+     * completes without throwing. Consider asserting getNumFailures() == 0.
+     */
+    public void testChecksumMetadataReporterSuccess()
+        throws DigesterException, IOException
+    {
+        createMetadataFile( "VALID" );
+        createMetadataFile( "INVALID" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        //Version level metadata
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        //Artifact level metadata
+        metadata = new ArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        //Group level metadata
+        metadata = new GroupRepositoryMetadata( "checksumTest" );
+        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+    }
+
+    /**
+     * Test the corrupted checksum of a metadata file.
+     * The reportingDatabase must record at least one failure for the metadata.
+     */
+    public void testChecksumMetadataReporterFailure()
+    {
+        Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+    }
+
+    /**
+     * Test the conditional when the checksum files of the artifact & metadata do not exist:
+     * missing checksum files must be reported as failures for both artifact and metadata.
+     */
+    public void testChecksumFilesDoNotExist()
+        throws DigesterException, IOException
+    {
+        createChecksumFile( "VALID" );
+        createMetadataFile( "VALID" );
+        // Remove the .md5/.sha1 files so only the payloads remain.
+        deleteChecksumFiles( "jar" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
+        assertEquals( 1, reportingDatabase.getNumFailures() );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+
+        // Clean up the fixture tree so later tests start from a pristine repository.
+        deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+    }
+}
diff --git a/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/DefaultArtifactReporterTest.java b/archiva-reports-standard/src/test/java/org/apache/maven/archiva/reporting/reporter/DefaultArtifactReporterTest.java
new file mode 100644 (file)
index 0000000..e7d955d
--- /dev/null
@@ -0,0 +1,368 @@
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+
+import java.util.Iterator;
+
+/**
+ *
+ */
+public class DefaultArtifactReporterTest
+    extends AbstractRepositoryReportsTestCase
+{
+    private ReportingDatabase reportingDatabase;
+
+    private RepositoryMetadata metadata;
+
+    private static final String PROCESSOR = "processor";
+
+    private static final String PROBLEM = "problem";
+
+    private Artifact artifact;
+
+    public void testEmptyArtifactReporter()
+    {
+        assertEquals( "No failures", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "No warnings", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+        assertFalse( "No artifact failures", reportingDatabase.getArtifactIterator().hasNext() );
+        assertFalse( "No metadata failures", reportingDatabase.getMetadataIterator().hasNext() );
+    }
+
+    public void testMetadataSingleFailure()
+    {
+        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
+        assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertMetadata( results );
+        assertEquals( "check failure reason", "Single Failure Reason", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more failures", failures.hasNext() );
+    }
+
+    private void assertMetadata( MetadataResults result )
+    {
+        assertEquals( "check failure cause", metadata.getGroupId(), result.getGroupId() );
+        assertEquals( "check failure cause", metadata.getArtifactId(), result.getArtifactId() );
+        assertEquals( "check failure cause", metadata.getBaseVersion(), result.getVersion() );
+    }
+
+    public void testMetadataMultipleFailures()
+    {
+        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
+        reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
+        assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertMetadata( results );
+        assertEquals( "check failure reason", "First Failure Reason", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertTrue( "must have 2nd failure", failures.hasNext() );
+        result = (Result) failures.next();
+        assertEquals( "check failure reason", "Second Failure Reason", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more failures", failures.hasNext() );
+    }
+
+    public void testMetadataSingleWarning()
+    {
+        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
+        assertEquals( "failures count", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+        Iterator warnings = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        MetadataResults results = (MetadataResults) warnings.next();
+        warnings = results.getWarnings().iterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        Result result = (Result) warnings.next();
+        assertMetadata( results );
+        assertEquals( "check failure reason", "Single Warning Message", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more warnings", warnings.hasNext() );
+    }
+
+    public void testMetadataMultipleWarnings()
+    {
+        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
+        reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
+        assertEquals( "failures count", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+        Iterator warnings = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        MetadataResults results = (MetadataResults) warnings.next();
+        warnings = results.getWarnings().iterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        Result result = (Result) warnings.next();
+        assertMetadata( results );
+        assertEquals( "check failure reason", "First Warning", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertTrue( "must have 2nd warning", warnings.hasNext() );
+        result = (Result) warnings.next();
+        assertEquals( "check failure reason", "Second Warning", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more warnings", warnings.hasNext() );
+    }
+
+    public void testMetadataSingleNotice()
+    {
+        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
+        assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+
+        Iterator warnings = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        MetadataResults results = (MetadataResults) warnings.next();
+        warnings = results.getNotices().iterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        Result result = (Result) warnings.next();
+        assertMetadata( results );
+        assertEquals( "check failure reason", "Single Notice Message", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more notices", warnings.hasNext() );
+    }
+
+    public void testMetadataMultipleNotices()
+    {
+        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
+        reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
+        assertEquals( "failures count", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check notices", 2, reportingDatabase.getNumNotices() );
+
+        Iterator warnings = reportingDatabase.getMetadataIterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        MetadataResults results = (MetadataResults) warnings.next();
+        warnings = results.getNotices().iterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        Result result = (Result) warnings.next();
+        assertMetadata( results );
+        assertEquals( "check failure reason", "First Notice", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertTrue( "must have 2nd notice", warnings.hasNext() );
+        result = (Result) warnings.next();
+        assertEquals( "check failure reason", "Second Notice", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more notices", warnings.hasNext() );
+    }
+
+    public void testArtifactSingleFailure()
+    {
+        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
+        assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        ArtifactResults results = (ArtifactResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertArtifact( results );
+        assertEquals( "check failure reason", "Single Failure Reason", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more failures", failures.hasNext() );
+    }
+
+    private void assertArtifact( ArtifactResults results )
+    {
+        assertEquals( "check failure cause", artifact.getGroupId(), results.getGroupId() );
+        assertEquals( "check failure cause", artifact.getArtifactId(), results.getArtifactId() );
+        assertEquals( "check failure cause", artifact.getVersion(), results.getVersion() );
+        assertEquals( "check failure cause", artifact.getClassifier(), results.getClassifier() );
+        assertEquals( "check failure cause", artifact.getType(), results.getType() );
+    }
+
+    public void testArtifactMultipleFailures()
+    {
+        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
+        reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
+        assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+        Iterator failures = reportingDatabase.getArtifactIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        ArtifactResults results = (ArtifactResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        Result result = (Result) failures.next();
+        assertArtifact( results );
+        assertEquals( "check failure reason", "First Failure Reason", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertTrue( "must have 2nd failure", failures.hasNext() );
+        result = (Result) failures.next();
+        assertEquals( "check failure reason", "Second Failure Reason", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more failures", failures.hasNext() );
+    }
+
+    public void testArtifactSingleWarning()
+    {
+        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
+        assertEquals( "failures count", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+        Iterator warnings = reportingDatabase.getArtifactIterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        ArtifactResults results = (ArtifactResults) warnings.next();
+        warnings = results.getWarnings().iterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        Result result = (Result) warnings.next();
+        assertArtifact( results );
+        assertEquals( "check failure reason", "Single Warning Message", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more warnings", warnings.hasNext() );
+    }
+
+    public void testArtifactMultipleWarnings()
+    {
+        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
+        reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
+        assertEquals( "failures count", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
+        assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+        Iterator warnings = reportingDatabase.getArtifactIterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        ArtifactResults results = (ArtifactResults) warnings.next();
+        warnings = results.getWarnings().iterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        Result result = (Result) warnings.next();
+        assertArtifact( results );
+        assertEquals( "check failure reason", "First Warning", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertTrue( "must have 2nd warning", warnings.hasNext() );
+        result = (Result) warnings.next();
+        assertEquals( "check failure reason", "Second Warning", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more warnings", warnings.hasNext() );
+    }
+
+    public void testArtifactSingleNotice()
+    {
+        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
+        assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+
+        Iterator warnings = reportingDatabase.getArtifactIterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        ArtifactResults results = (ArtifactResults) warnings.next();
+        warnings = results.getNotices().iterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        Result result = (Result) warnings.next();
+        assertArtifact( results );
+        assertEquals( "check failure reason", "Single Notice Message", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more notices", warnings.hasNext() );
+    }
+
+    public void testArtifactMultipleNotices()
+    {
+        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
+        reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
+        assertEquals( "failures count", 0, reportingDatabase.getNumFailures() );
+        assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
+        assertEquals( "check notices", 2, reportingDatabase.getNumNotices() );
+
+        Iterator warnings = reportingDatabase.getArtifactIterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        ArtifactResults results = (ArtifactResults) warnings.next();
+        warnings = results.getNotices().iterator();
+        assertTrue( "check there is a failure", warnings.hasNext() );
+        Result result = (Result) warnings.next();
+        assertArtifact( results );
+        assertEquals( "check failure reason", "First Notice", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertTrue( "must have 2nd notice", warnings.hasNext() );
+        result = (Result) warnings.next();
+        assertEquals( "check failure reason", "Second Notice", result.getReason() );
+        assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
+        assertFalse( "no more notices", warnings.hasNext() );
+    }
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+        artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+        Versioning versioning = new Versioning();
+        versioning.addVersion( "1.0-alpha-1" );
+        versioning.addVersion( "1.0-alpha-2" );
+
+        metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+        ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
+        reportingDatabase = new ReportingDatabase( reportGroup );
+    }
+}
diff --git a/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessorTest.xml b/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/DuplicateArtifactFileReportProcessorTest.xml
deleted file mode 100644 (file)
index bda4e9a..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
-  ~ Copyright 2005-2006 The Apache Software Foundation.
-  ~
-  ~ Licensed under the Apache License, Version 2.0 (the "License");
-  ~ you may not use this file except in compliance with the License.
-  ~ You may obtain a copy of the License at
-  ~
-  ~      http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<component-set>
-  <components>
-    <component>
-      <role>org.apache.maven.archiva.reporting.ArtifactReportProcessor</role>
-      <role-hint>duplicate</role-hint>
-      <implementation>org.apache.maven.archiva.reporting.DuplicateArtifactFileReportProcessor</implementation>
-      <requirements>
-        <requirement>
-          <role>org.codehaus.plexus.digest.Digester</role>
-          <role-hint>md5</role-hint>
-        </requirement>
-        <requirement>
-          <role>org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory</role>
-        </requirement>
-      </requirements>
-      <configuration>
-        <indexDirectory>${basedir}/target/indexDirectory</indexDirectory>
-      </configuration>
-    </component>
-  </components>
-</component-set>
\ No newline at end of file
diff --git a/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/OldArtifactReportProcessorTest.xml b/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/OldArtifactReportProcessorTest.xml
deleted file mode 100644 (file)
index 34b01e4..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
-  ~ Copyright 2005-2006 The Apache Software Foundation.
-  ~
-  ~ Licensed under the Apache License, Version 2.0 (the "License");
-  ~ you may not use this file except in compliance with the License.
-  ~ You may obtain a copy of the License at
-  ~
-  ~      http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<component-set>
-  <components>
-    <component>
-      <role>org.apache.maven.archiva.reporting.ArtifactReportProcessor</role>
-      <role-hint>old-artifact</role-hint>
-      <implementation>org.apache.maven.archiva.reporting.OldArtifactReportProcessor</implementation>
-      <configuration>
-        <maxAge>10</maxAge>
-      </configuration>
-    </component>
-  </components>
-</component-set>
\ No newline at end of file
diff --git a/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessorTest.xml b/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/OldSnapshotArtifactReportProcessorTest.xml
deleted file mode 100644 (file)
index 1f7cfce..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
-  ~ Copyright 2005-2006 The Apache Software Foundation.
-  ~
-  ~ Licensed under the Apache License, Version 2.0 (the "License");
-  ~ you may not use this file except in compliance with the License.
-  ~ You may obtain a copy of the License at
-  ~
-  ~      http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<component-set>
-  <components>
-    <component>
-      <role>org.apache.maven.archiva.reporting.ArtifactReportProcessor</role>
-      <role-hint>old-snapshot-artifact</role-hint>
-      <implementation>org.apache.maven.archiva.reporting.OldSnapshotArtifactReportProcessor</implementation>
-      <configuration>
-        <maxAge>3600</maxAge>
-        <maxSnapshots>2</maxSnapshots>
-      </configuration>
-    </component>
-  </components>
-</component-set>
\ No newline at end of file
diff --git a/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.xml b/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessorTest.xml
new file mode 100644 (file)
index 0000000..5cef197
--- /dev/null
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Copyright 2005-2006 The Apache Software Foundation.
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
+      <role-hint>duplicate</role-hint>
+      <implementation>org.apache.maven.archiva.reporting.processor.DuplicateArtifactFileReportProcessor</implementation>
+      <requirements>
+        <requirement>
+          <role>org.codehaus.plexus.digest.Digester</role>
+          <role-hint>md5</role-hint>
+        </requirement>
+        <requirement>
+          <role>org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory</role>
+        </requirement>
+      </requirements>
+      <configuration>
+        <indexDirectory>${basedir}/target/indexDirectory</indexDirectory>
+      </configuration>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
diff --git a/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.xml b/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessorTest.xml
new file mode 100644 (file)
index 0000000..0f9493b
--- /dev/null
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Copyright 2005-2006 The Apache Software Foundation.
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
+      <role-hint>old-artifact</role-hint>
+      <implementation>org.apache.maven.archiva.reporting.processor.OldArtifactReportProcessor</implementation>
+      <configuration>
+        <maxAge>10</maxAge>
+      </configuration>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file
diff --git a/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.xml b/archiva-reports-standard/src/test/resources/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessorTest.xml
new file mode 100644 (file)
index 0000000..0cf6183
--- /dev/null
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+  ~ Copyright 2005-2006 The Apache Software Foundation.
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~      http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<component-set>
+  <components>
+    <component>
+      <role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
+      <role-hint>old-snapshot-artifact</role-hint>
+      <implementation>org.apache.maven.archiva.reporting.processor.OldSnapshotArtifactReportProcessor</implementation>
+      <configuration>
+        <maxAge>3600</maxAge>
+        <maxSnapshots>2</maxSnapshots>
+      </configuration>
+    </component>
+  </components>
+</component-set>
\ No newline at end of file