import org.apache.maven.archiva.converter.transaction.FileTransaction;
import org.apache.maven.archiva.digest.Digester;
import org.apache.maven.archiva.digest.DigesterException;
-import org.apache.maven.archiva.reporting.ArtifactReporter;
+import org.apache.maven.archiva.reporting.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
*/
private I18N i18n;
- public void convert( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter )
+ public void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
throws RepositoryConversionException
{
if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
{
transaction.commit();
}
- reporter.addSuccess( artifact );
}
}
}
return metadata;
}
- private boolean validateMetadata( Artifact artifact, ArtifactReporter reporter )
+ private boolean validateMetadata( Artifact artifact, ReportingDatabase reporter )
throws RepositoryConversionException
{
ArtifactRepository repository = artifact.getRepository();
}
private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
- ArtifactReporter reporter )
+ ReportingDatabase reporter )
{
String groupIdKey;
String artifactIdKey = null;
return result;
}
- private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
+ private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
FileTransaction transaction )
throws RepositoryConversionException
{
return i18n.getString( getClass().getName(), Locale.getDefault(), key );
}
- private boolean testChecksums( Artifact artifact, File file, ArtifactReporter reporter )
+ private boolean testChecksums( Artifact artifact, File file, ReportingDatabase reporter )
throws IOException
{
return result;
}
- private boolean verifyChecksum( File file, String fileName, Digester digester, ArtifactReporter reporter,
+ private boolean verifyChecksum( File file, String fileName, Digester digester, ReportingDatabase reporter,
Artifact artifact, String key )
throws IOException
{
return result;
}
- private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
+ private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
FileTransaction transaction )
throws RepositoryConversionException
{
return result;
}
- public void convert( List artifacts, ArtifactRepository targetRepository, ArtifactReporter reporter )
+ public void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
throws RepositoryConversionException
{
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
* limitations under the License.
*/
-import org.apache.maven.archiva.reporting.ArtifactReporter;
+import org.apache.maven.archiva.reporting.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
* @param targetRepository the target repository
* @param reporter reporter to track the results of the conversion
*/
- void convert( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter )
+ void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
throws RepositoryConversionException;
/**
* @param targetRepository the target repository
* @param reporter reporter to track the results of the conversions
*/
- void convert( List artifacts, ArtifactRepository targetRepository, ArtifactReporter reporter )
+ void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
throws RepositoryConversionException;
}
* limitations under the License.
*/
-import org.apache.maven.archiva.reporting.ArtifactReporter;
-import org.apache.maven.archiva.reporting.ArtifactResult;
-import org.apache.maven.archiva.reporting.DefaultArtifactReporter;
+import org.apache.maven.archiva.reporting.ReportingDatabase;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ArtifactMetadata;
private ArtifactFactory artifactFactory;
- private ArtifactReporter reporter;
+ private ReportingDatabase reporter;
private static final int SLEEP_MILLIS = 100;
i18n = (I18N) lookup( I18N.ROLE );
- reporter = new DefaultArtifactReporter();
+ reporter = new ReportingDatabase();
}
private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
repositoryConverter.convert( artifact, targetRepository, reporter );
assertEquals( "check no errors", 0, reporter.getNumFailures() );
assertEquals( "check number of warnings", 2, reporter.getNumWarnings() );
- assertEquals( "check success", 1, reporter.getNumSuccesses() );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
repositoryConverter.convert( artifact, targetRepository, reporter );
assertEquals( "check no errors", 0, reporter.getNumFailures() );
assertEquals( "check no warnings", 1, reporter.getNumWarnings() );
- assertEquals( "check success", 1, reporter.getNumSuccesses() );
assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
repositoryConverter.convert( artifacts, targetRepository, reporter );
assertEquals( "check no errors", 0, reporter.getNumFailures() );
assertEquals( "check no warnings", 0, reporter.getNumWarnings() );
- assertEquals( "check successes", 3, reporter.getNumSuccesses() );
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
{
{
assertEquals( "check no errors", 0, reporter.getNumFailures() );
assertEquals( "check no warnings", 0, reporter.getNumWarnings() );
- assertEquals( "check success", 1, reporter.getNumSuccesses() );
}
private void checkFailure()
{
assertEquals( "check num errors", 1, reporter.getNumFailures() );
assertEquals( "check no warnings", 0, reporter.getNumWarnings() );
- assertEquals( "check no success", 0, reporter.getNumSuccesses() );
}
private String getI18nString( String key )
return i18n.getString( repositoryConverter.getClass().getName(), Locale.getDefault(), key );
}
- private ArtifactResult getFailure()
+ private Result getFailure()
{
- return (ArtifactResult) reporter.getArtifactFailureIterator().next();
+ ArtifactResults artifact = (ArtifactResults) reporter.getArtifactIterator().next();
+ return (Result) artifact.getFailures().get( 0 );
}
- private ArtifactResult getWarning()
+ private Result getWarning()
{
- return (ArtifactResult) reporter.getArtifactWarningIterator().next();
+ ArtifactResults artifact = (ArtifactResults) reporter.getArtifactIterator().next();
+ return (Result) artifact.getWarnings().get( 0 );
}
private void createModernSourceRepository()
import org.apache.maven.archiva.discoverer.DiscovererException;
import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.reporting.ArtifactReporter;
+import org.apache.maven.archiva.reporting.ReportingDatabase;
+import org.apache.maven.archiva.reporting.ReportingStore;
+import org.apache.maven.archiva.reporting.ReportingStoreException;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
/**
* @plexus.requirement
*/
- private ArtifactReporter reporter;
+ private ReportingStore reportingStore;
public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
boolean includeSnapshots )
includeSnapshots ? new AcceptAllArtifactFilter() : (ArtifactFilter) new SnapshotArtifactFilter();
List legacyArtifacts = artifactDiscoverer.discoverArtifacts( legacyRepository, null, filter );
- repositoryConverter.convert( legacyArtifacts, repository, reporter );
+ ReportingDatabase reporter;
+ try
+ {
+ reporter = reportingStore.getReportsFromStore( repository );
+
+ repositoryConverter.convert( legacyArtifacts, repository, reporter );
+
+ reportingStore.storeReports( reporter, repository );
+ }
+ catch ( ReportingStoreException e )
+ {
+ throw new RepositoryConversionException( "Error converting legacy repository.", e );
+ }
}
}
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.archiva.reporting.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.ReportingDatabase;
+import org.apache.maven.archiva.reporting.ReportingStore;
+import org.apache.maven.archiva.reporting.ReportingStoreException;
import org.apache.maven.archiva.scheduler.TaskExecutionException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
*/
private Map artifactDiscoverers;
+ /**
+ * @plexus.requirement role="org.apache.maven.archiva.reporting.ArtifactReportProcessor"
+ */
+ private List artifactReports;
+
+ /**
+ * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
+ */
+ private Map metadataDiscoverers;
+
+ /**
+ * @plexus.requirement role="org.apache.maven.archiva.reporting.MetadataReportProcessor"
+ */
+ private List metadataReports;
+
/**
* @plexus.requirement role-hint="standard"
*/
private RepositoryIndexRecordFactory recordFactory;
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactFactory artifactFactory;
+
+ private static final int ARTIFACT_BUFFER_SIZE = 1000;
+
+ /**
+ * @plexus.requirement
+ */
+ private ReportingStore reportingStore;
+
public void execute()
throws TaskExecutionException
{
ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
+ getLogger().debug(
+ "Reading previous report database from repository " + repositoryConfiguration.getName() );
+ ReportingDatabase reporter = reportingStore.getReportsFromStore( repository );
+
+ // Discovery process
String layoutProperty = repositoryConfiguration.getLayout();
ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
AndArtifactFilter filter = new AndArtifactFilter();
getLogger().info( "Searching repository " + repositoryConfiguration.getName() );
List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
+
if ( !artifacts.isEmpty() )
{
- // TODO! reporting
+ getLogger().info( "Discovered " + artifacts.size() + " unindexed artifacts" );
+
+ // Work through these in batches, then flush the project cache.
+ for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
+ {
+ int end = j + ARTIFACT_BUFFER_SIZE;
+ List currentArtifacts =
+ artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
- getLogger().info( "Indexing " + artifacts.size() + " new artifacts" );
- index.indexArtifacts( artifacts, recordFactory );
+ // run the reports
+ runArtifactReports( currentArtifacts, reporter );
+
+ index.indexArtifacts( currentArtifacts, recordFactory );
+ }
+
+ // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
+ // around that. TODO: remove when it is configurable
+ flushProjectBuilderCacheHack();
}
+
+ // TODO! use reporting manager as a filter
+ MetadataDiscoverer metadataDiscoverer =
+ (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
+ metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns );
+
+ //TODO! metadata reporting
+
+ reportingStore.storeReports( reporter, repository );
}
}
}
{
throw new TaskExecutionException( e.getMessage(), e );
}
+ catch ( ReportingStoreException e )
+ {
+ throw new TaskExecutionException( e.getMessage(), e );
+ }
time = System.currentTimeMillis() - time;
getLogger().info( "Finished repository indexing process in " + time + "ms" );
}
+ private void runArtifactReports( List artifacts, ReportingDatabase reporter )
+ {
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact artifact = (Artifact) i.next();
+
+ ArtifactRepository repository = artifact.getRepository();
+
+ Model model = null;
+ try
+ {
+ Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
+ artifact.getArtifactId(),
+ artifact.getVersion() );
+ MavenProject project =
+ projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
+
+ model = project.getModel();
+ }
+ catch ( ProjectBuildingException e )
+ {
+ reporter.addWarning( artifact, "Error reading project model: " + e );
+ }
+ runArtifactReports( artifact, model, reporter );
+ }
+ }
+
+ private void runArtifactReports( Artifact artifact, Model model, ReportingDatabase reporter )
+ {
+ // TODO: should the report set be limitable by configuration?
+ for ( Iterator i = artifactReports.iterator(); i.hasNext(); )
+ {
+ ArtifactReportProcessor report = (ArtifactReportProcessor) i.next();
+
+ report.processArtifact( artifact, model, reporter );
+ }
+ }
+
public void executeNowIfNeeded()
throws TaskExecutionException
{
throw new TaskExecutionException( e.getMessage(), e );
}
}
+
+ /**
+ * @todo remove when no longer needed (MNG-142)
+ * @plexus.requirement
+ */
+ private MavenProjectBuilder projectBuilder;
+
+ private void flushProjectBuilderCacheHack()
+ {
+ try
+ {
+ if ( projectBuilder != null )
+ {
+ java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
+ f.setAccessible( true );
+ Map cache = (Map) f.get( projectBuilder );
+ cache.clear();
+
+ f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
+ f.setAccessible( true );
+ cache = (Map) f.get( projectBuilder );
+ cache.clear();
+ }
+ }
+ catch ( NoSuchFieldException e )
+ {
+ throw new RuntimeException( e );
+ }
+ catch ( IllegalAccessException e )
+ {
+ throw new RuntimeException( e );
+ }
+ }
+
}
{
artifacts.add( artifact );
}
- // TODO: else add to excluded? [!]
+ else
+ {
+ addExcludedPath( path, "Omitted by filter" );
+ }
}
catch ( DiscovererException e )
{
}
}
+ /**
+ * Add a path to the list of files that were excluded.
+ *
+ * @param path the path to add
+ * @param reason the reason why the path is excluded
+ */
+ protected void addExcludedPath( String path, String reason )
+ {
+ excludedPaths.add( new DiscovererPath( path, reason ) );
+ }
+
/**
* Returns an iterator for the list of DiscovererPaths that were found to not represent a searched object
*
*/
private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"};
- public List discoverMetadata( ArtifactRepository repository, String operation, List blacklistedPatterns )
+ public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
throws DiscovererException
{
if ( !"file".equals( repository.getProtocol() ) )
* Search for metadata files in the repository.
*
* @param repository The repository.
- * @param operation the operation being performed (used for timestamp comparison)
* @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
* @return the list of metadata found
* @throws DiscovererException if there is a problem during the discovery process
*/
- List discoverMetadata( ArtifactRepository repository, String operation, List blacklistedPatterns )
+ List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
throws DiscovererException;
}
public void testKickoutWrongDirectory()
throws DiscovererException
{
- discoverer.discoverMetadata( repository, TEST_OPERATION, null );
+ discoverer.discoverMetadata( repository, null );
Iterator iter = discoverer.getKickedOutPathsIterator();
boolean found = false;
while ( iter.hasNext() && !found )
public void testKickoutBlankMetadata()
throws DiscovererException
{
- discoverer.discoverMetadata( repository, TEST_OPERATION, null );
+ discoverer.discoverMetadata( repository, null );
Iterator iter = discoverer.getKickedOutPathsIterator();
boolean found = false;
while ( iter.hasNext() && !found )
public void testDiscoverMetadata()
throws DiscovererException
{
- List metadataPaths = discoverer.discoverMetadata( repository, TEST_OPERATION, null );
+ List metadataPaths = discoverer.discoverMetadata( repository, null );
assertNotNull( "Check metadata not null", metadataPaths );
RepositoryMetadata metadata =
import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
import org.apache.maven.artifact.Artifact;
-import org.apache.maven.project.MavenProjectBuilder;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
/**
private static Analyzer luceneAnalyzer = new LuceneAnalyzer();
- private MavenProjectBuilder projectBuilder;
-
private static long lastUpdatedTime = 0;
public LuceneRepositoryArtifactIndex( File indexPath, LuceneIndexRecordConverter converter )
this.converter = converter;
}
- public LuceneRepositoryArtifactIndex( File indexLocation, LuceneIndexRecordConverter converter,
- MavenProjectBuilder projectBuilder )
- {
- this.indexLocation = indexLocation;
- this.converter = converter;
- this.projectBuilder = projectBuilder;
- }
-
public void indexRecords( Collection records )
throws RepositoryIndexException
{
{
indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
- int count = 0;
- for ( Iterator i = artifacts.iterator(); i.hasNext(); count++ )
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
{
Artifact artifact = (Artifact) i.next();
RepositoryIndexRecord record = factory.createRecord( artifact );
indexModifier.addDocument( document );
}
-
- if ( count % 100 == 0 )
- {
- // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
- // around that. TODO: remove when it is configurable
- flushProjectBuilderCacheHack();
- }
}
indexModifier.optimize();
}
return new ArrayList( results );
}
- private void flushProjectBuilderCacheHack()
- {
- try
- {
- if ( projectBuilder != null )
- {
- java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
- f.setAccessible( true );
- Map cache = (Map) f.get( projectBuilder );
- cache.clear();
-
- f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
- f.setAccessible( true );
- cache = (Map) f.get( projectBuilder );
- cache.clear();
- }
- }
- catch ( NoSuchFieldException e )
- {
- throw new RuntimeException( e );
- }
- catch ( IllegalAccessException e )
- {
- throw new RuntimeException( e );
- }
- }
-
public boolean exists()
throws RepositoryIndexException
{
import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.project.MavenProjectBuilder;
import java.io.File;
public class LuceneRepositoryArtifactIndexFactory
implements RepositoryArtifactIndexFactory
{
- /**
- * @plexus.requirement
- * @todo remove when MNG-142 is fixed
- */
- private MavenProjectBuilder projectBuilder;
-
public RepositoryArtifactIndex createStandardIndex( File indexPath )
{
- return new LuceneRepositoryArtifactIndex( indexPath, new LuceneStandardIndexRecordConverter(), projectBuilder );
+ return new LuceneRepositoryArtifactIndex( indexPath, new LuceneStandardIndexRecordConverter() );
}
public RepositoryArtifactIndex createMinimalIndex( File indexPath )
{
- return new LuceneRepositoryArtifactIndex( indexPath, new LuceneMinimalIndexRecordConverter(), projectBuilder );
+ return new LuceneRepositoryArtifactIndex( indexPath, new LuceneMinimalIndexRecordConverter() );
}
}
<artifactId>archiva-indexer</artifactId>
</dependency>
</dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.modello</groupId>
+ <artifactId>modello-maven-plugin</artifactId>
+ <version>1.0-alpha-10</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>xpp3-writer</goal>
+ <goal>java</goal>
+ <goal>xpp3-reader</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <version>1.0.0</version>
+ <model>src/main/mdo/reporting.mdo</model>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>cobertura-maven-plugin</artifactId>
+ <configuration>
+ <instrumentation>
+ <!-- exclude generated -->
+ <excludes>
+ <exclude>org/apache/maven/archiva/reporting/model/**</exclude>
+ </excludes>
+ </instrumentation>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
</project>
*/
import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
/**
{
String ROLE = ArtifactReportProcessor.class.getName();
- void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter, ArtifactRepository repository )
- throws ReportProcessorException;
-
+ void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter );
}
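A minimal sketch of a processor written against the new exception-free signature; the class name is hypothetical, and it relies only on the addWarning call that ReportingDatabase is shown to provide elsewhere in this change:

public class ExampleArtifactReportProcessor
    implements ArtifactReportProcessor
{
    public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
    {
        // Problems are recorded in the reporting database instead of being thrown.
        if ( artifact.getFile() == null )
        {
            reporter.addWarning( artifact, "Artifact file is not available for processing" );
        }
    }
}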
+++ /dev/null
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.Iterator;
-
-/**
- * This interface is used by the single artifact processor.
- * <p/>
- * The initial implementation of this will just need to be a mock implementation in src/test/java, used to track the
- * failures and successes for checking assertions. Later, implementations will be made to present reports on the
- * web interface, send them via mail, and so on.
- *
- * @todo i18n, including message formatting and parameterisation
- */
-public interface ArtifactReporter
-{
- String ROLE = ArtifactReporter.class.getName();
-
- String ARTIFACT_NOT_FOUND = "Artifact does not exist in the repository";
-
- String DEPENDENCY_NOT_FOUND = "Artifact's dependency does not exist in the repository";
-
- String DEPENDENCY_INVALID_VERSION = "Artifact's dependency contains an invalid version";
-
- void addFailure( Artifact artifact, String reason );
-
- void addSuccess( Artifact artifact );
-
- void addWarning( Artifact artifact, String message );
-
- void addFailure( RepositoryMetadata metadata, String reason );
-
- void addSuccess( RepositoryMetadata metadata );
-
- void addWarning( RepositoryMetadata metadata, String message );
-
- Iterator getArtifactFailureIterator();
-
- Iterator getArtifactSuccessIterator();
-
- Iterator getArtifactWarningIterator();
-
- Iterator getRepositoryMetadataFailureIterator();
-
- Iterator getRepositoryMetadataSuccessIterator();
-
- Iterator getRepositoryMetadataWarningIterator();
-
- int getNumFailures();
-
- int getNumSuccesses();
-
- int getNumWarnings();
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-/**
- * A result of the report for a given artifact being processed.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- */
-public class ArtifactResult
-{
- private final Artifact artifact;
-
- private final String reason;
-
- public ArtifactResult( Artifact artifact )
- {
- this.artifact = artifact;
- this.reason = null;
- }
-
- public ArtifactResult( Artifact artifact, String reason )
- {
- this.artifact = artifact;
- this.reason = reason;
- }
-
- public Artifact getArtifact()
- {
- return artifact;
- }
-
- public String getReason()
- {
- return reason;
- }
-}
*
* @param metadata the metadata to be processed.
* @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
- * @throws ReportProcessorException if an error was occurred while processing the metadata
+ * @param reporter the ReportingDatabase to receive processing results
*/
- public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
- throws ReportProcessorException
+ public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+ ReportingDatabase reporter )
{
- boolean hasFailures = false;
-
if ( metadata.storedInGroupDirectory() )
{
try
{
- hasFailures = checkPluginMetadata( metadata, repository, reporter );
+ checkPluginMetadata( metadata, repository, reporter );
}
catch ( IOException e )
{
- throw new ReportProcessorException( "Error getting plugin artifact directories versions", e );
+ reporter.addWarning( metadata, "Error getting plugin artifact directories versions: " + e );
}
}
else
if ( lastUpdated == null || lastUpdated.length() == 0 )
{
reporter.addFailure( metadata, "Missing lastUpdated element inside the metadata." );
- hasFailures = true;
}
if ( metadata.storedInArtifactVersionDirectory() )
{
- hasFailures |= checkSnapshotMetadata( metadata, repository, reporter );
+ checkSnapshotMetadata( metadata, repository, reporter );
}
else
{
- if ( !checkMetadataVersions( metadata, repository, reporter ) )
- {
- hasFailures = true;
- }
+ checkMetadataVersions( metadata, repository, reporter );
try
{
- if ( checkRepositoryVersions( metadata, repository, reporter ) )
- {
- hasFailures = true;
- }
+ checkRepositoryVersions( metadata, repository, reporter );
}
catch ( IOException e )
{
- throw new ReportProcessorException( "Error getting versions", e );
+ reporter.addWarning( metadata, "Error getting versions: " + e );
}
}
}
-
- if ( !hasFailures )
- {
- reporter.addSuccess( metadata );
- }
}
/**
*
* @param metadata the metadata to be processed.
* @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
+ * @param reporter the ReportingDatabase to receive processing results
*/
- private boolean checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ArtifactReporter reporter )
+ private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+ ReportingDatabase reporter )
throws IOException
{
- boolean hasFailures = false;
-
File metadataDir =
new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
List pluginDirs = getArtifactIdFiles( metadataDir );
String artifactId = plugin.getArtifactId();
if ( artifactId == null || artifactId.length() == 0 )
{
- reporter.addFailure( metadata, "Missing or empty artifactId in group metadata." );
- hasFailures = true;
+ reporter.addFailure( metadata,
+ "Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
}
String prefix = plugin.getPrefix();
if ( prefix == null || prefix.length() == 0 )
{
reporter.addFailure( metadata, "Missing or empty plugin prefix for artifactId " + artifactId + "." );
- hasFailures = true;
}
else
{
if ( prefixes.containsKey( prefix ) )
{
reporter.addFailure( metadata, "Duplicate plugin prefix found: " + prefix + "." );
- hasFailures = true;
}
else
{
if ( !pluginDirs.contains( pluginDir ) )
{
reporter.addFailure( metadata, "Metadata plugin " + artifactId + " not found in the repository" );
- hasFailures = true;
}
else
{
reporter.addFailure( metadata, "Plugin " + plugin.getName() + " is present in the repository but " +
"missing in the metadata." );
}
- hasFailures = true;
}
-
- return hasFailures;
}
/**
*
* @param metadata the metadata to be processed.
* @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
+ * @param reporter the ReportingDatabase to receive processing results
*/
- private boolean checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ArtifactReporter reporter )
+ private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+ ReportingDatabase reporter )
{
RepositoryQueryLayer repositoryQueryLayer =
repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
- boolean hasFailures = false;
-
Snapshot snapshot = metadata.getMetadata().getVersioning().getSnapshot();
String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION,
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
reporter.addFailure( metadata, "Snapshot artifact " + version + " does not exist." );
- hasFailures = true;
}
-
- return hasFailures;
}
/**
*
* @param metadata the metadata to be processed.
* @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
+ * @param reporter the ReportingDatabase to receive processing results
*/
- private boolean checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
- ArtifactReporter reporter )
+ private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
+ ReportingDatabase reporter )
{
RepositoryQueryLayer repositoryQueryLayer =
repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
- boolean hasFailures = false;
Versioning versioning = metadata.getMetadata().getVersioning();
for ( Iterator versions = versioning.getVersions().iterator(); versions.hasNext(); )
{
{
reporter.addFailure( metadata, "Artifact version " + version + " is present in metadata but " +
"missing in the repository." );
- hasFailures = true;
}
}
- return hasFailures;
}
/**
*
* @param metadata the metadata to be processed.
* @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
+ * @param reporter the ReportingDatabase to receive processing results
*/
- private boolean checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
- ArtifactReporter reporter )
+ private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
+ ReportingDatabase reporter )
throws IOException
{
- boolean hasFailures = false;
Versioning versioning = metadata.getMetadata().getVersioning();
File versionsDir =
new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
{
reporter.addFailure( metadata, "Artifact version " + version + " found in the repository but " +
"missing in the metadata." );
- hasFailures = true;
}
}
- return hasFailures;
}
/**
{
List artifactIdFiles = new ArrayList();
- List fileArray = new ArrayList( Arrays.asList( groupIdDir.listFiles() ) );
- for ( Iterator files = fileArray.iterator(); files.hasNext(); )
+ File[] files = groupIdDir.listFiles();
+ if ( files != null )
{
- File artifactDir = (File) files.next();
-
- if ( artifactDir.isDirectory() )
+ for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
{
- List versions = FileUtils.getFileNames( artifactDir, "*/*.pom", null, false );
- if ( versions.size() > 0 )
+ File artifactDir = (File) i.next();
+
+ if ( artifactDir.isDirectory() )
{
- artifactIdFiles.add( artifactDir );
+ List versions = FileUtils.getFileNames( artifactDir, "*/*.pom", null, false );
+ if ( versions.size() > 0 )
+ {
+ artifactIdFiles.add( artifactDir );
+ }
}
}
}
*/
private Digester md5Digester;
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
+ public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
{
+ ArtifactRepository repository = artifact.getRepository();
+
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
String path = repository.pathOf( artifact );
File file = new File( repository.getBasedir(), path );
- verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
+ // TODO: make md5 configurable
+// verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
}
private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
- ArtifactReporter reporter, Artifact artifact )
+ ReportingDatabase reporter, Artifact artifact )
{
File checksumFile = new File( repository.getBasedir(), path );
if ( checksumFile.exists() )
try
{
digester.verify( file, FileUtils.fileRead( checksumFile ) );
-
- reporter.addSuccess( artifact );
}
catch ( DigesterException e )
{
* Validate the checksums of the metadata. Get the metadata file from the
* repository then validate the checksum.
*/
- public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
+ public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+ ReportingDatabase reporter )
{
if ( !"file".equals( repository.getProtocol() ) )
{
}
private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
- ArtifactReporter reporter, RepositoryMetadata metadata )
+ ReportingDatabase reporter, RepositoryMetadata metadata )
{
File checksumFile = new File( repository.getBasedir(), path );
if ( checksumFile.exists() )
try
{
digester.verify( file, FileUtils.fileRead( checksumFile ) );
-
- reporter.addSuccess( metadata );
}
catch ( DigesterException e )
{
+++ /dev/null
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.digest.Digester;
-import org.apache.maven.archiva.digest.DigesterException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * This class reports invalid and mismatched checksums of artifacts and metadata files.
- * It validates MD5 and SHA-1 checksums.
- *
- * @plexus.component role="org.apache.maven.archiva.reporting.MetadataReportProcessor" role-hint="checksum-metadata"
- */
-public class ChecksumMetadataReporter
- implements MetadataReportProcessor
-{
- /**
- * @plexus.requirement role-hint="sha1"
- */
- private Digester sha1Digester;
-
- /**
- * @plexus.requirement role-hint="md5"
- */
- private Digester md5Digester;
-
- /**
- * Validate the checksums of the metadata. Get the metadata file from the
- * repository then validate the checksum.
- */
- public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
- }
-
- //check if checksum files exist
- String path = repository.pathOfRemoteRepositoryMetadata( metadata );
- File file = new File( repository.getBasedir(), path );
-
- verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
- verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );
- }
-
- private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
- ArtifactReporter reporter, RepositoryMetadata metadata )
- {
- File checksumFile = new File( repository.getBasedir(), path );
- if ( checksumFile.exists() )
- {
- try
- {
- digester.verify( file, FileUtils.fileRead( checksumFile ) );
-
- reporter.addSuccess( metadata );
- }
- catch ( DigesterException e )
- {
- reporter.addFailure( metadata, e.getMessage() );
- }
- catch ( IOException e )
- {
- reporter.addFailure( metadata, "Read file error: " + e.getMessage() );
- }
- }
- else
- {
- reporter.addFailure( metadata, digester.getAlgorithm() + " checksum file does not exist." );
- }
- }
-
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReporter"
- */
-public class DefaultArtifactReporter
- implements ArtifactReporter
-{
- private List artifactFailures = new ArrayList();
-
- private List artifactSuccesses = new ArrayList();
-
- private List artifactWarnings = new ArrayList();
-
- private List metadataFailures = new ArrayList();
-
- private List metadataSuccesses = new ArrayList();
-
- private List metadataWarnings = new ArrayList();
-
- public void addFailure( Artifact artifact, String reason )
- {
- artifactFailures.add( new ArtifactResult( artifact, reason ) );
- }
-
- public void addSuccess( Artifact artifact )
- {
- artifactSuccesses.add( new ArtifactResult( artifact ) );
- }
-
- public void addWarning( Artifact artifact, String message )
- {
- artifactWarnings.add( new ArtifactResult( artifact, message ) );
- }
-
- public void addFailure( RepositoryMetadata metadata, String reason )
- {
- metadataFailures.add( new RepositoryMetadataResult( metadata, reason ) );
- }
-
- public void addSuccess( RepositoryMetadata metadata )
- {
- metadataSuccesses.add( new RepositoryMetadataResult( metadata ) );
- }
-
- public void addWarning( RepositoryMetadata metadata, String message )
- {
- metadataWarnings.add( new RepositoryMetadataResult( metadata, message ) );
- }
-
- public Iterator getArtifactFailureIterator()
- {
- return artifactFailures.iterator();
- }
-
- public Iterator getArtifactSuccessIterator()
- {
- return artifactSuccesses.iterator();
- }
-
- public Iterator getArtifactWarningIterator()
- {
- return artifactWarnings.iterator();
- }
-
- public Iterator getRepositoryMetadataFailureIterator()
- {
- return metadataFailures.iterator();
- }
-
- public Iterator getRepositoryMetadataSuccessIterator()
- {
- return metadataSuccesses.iterator();
- }
-
- public Iterator getRepositoryMetadataWarningIterator()
- {
- return metadataWarnings.iterator();
- }
-
- public int getNumFailures()
- {
- return artifactFailures.size() + metadataFailures.size();
- }
-
- public int getNumSuccesses()
- {
- return artifactSuccesses.size() + metadataSuccesses.size();
- }
-
- public int getNumWarnings()
- {
- return artifactWarnings.size() + metadataWarnings.size();
- }
-}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Reader;
+import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Writer;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Load and store the reports. No synchronization is used, as it is unnecessary: the old object
+ * can continue to be used.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
+ * @todo support other implementations than XML file
+ * @plexus.component
+ */
+public class DefaultReportingStore
+ extends AbstractLogEnabled
+ implements ReportingStore
+{
+ /**
+ * The cached reports for given repositories.
+ */
+ private Map/*<ArtifactRepository,ReportingDatabase>*/ reports = new HashMap();
+
+ public ReportingDatabase getReportsFromStore( ArtifactRepository repository )
+ throws ReportingStoreException
+ {
+ ReportingDatabase database = (ReportingDatabase) reports.get( repository );
+
+ if ( database == null )
+ {
+ ReportingXpp3Reader reader = new ReportingXpp3Reader();
+
+ File file = new File( repository.getBasedir(), "report-database.xml" );
+
+ FileReader fileReader = null;
+ try
+ {
+ fileReader = new FileReader( file );
+ }
+ catch ( FileNotFoundException e )
+ {
+ database = new ReportingDatabase();
+ }
+
+ if ( database == null )
+ {
+ getLogger().info( "Reading report database from " + file );
+ try
+ {
+ database = new ReportingDatabase( reader.read( fileReader, false ) );
+ }
+ catch ( IOException e )
+ {
+ throw new ReportingStoreException( e.getMessage(), e );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new ReportingStoreException( e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( fileReader );
+ }
+ }
+
+ reports.put( repository, database );
+ }
+ return database;
+ }
+
+ public void storeReports( ReportingDatabase database, ArtifactRepository repository )
+ throws ReportingStoreException
+ {
+ ReportingXpp3Writer writer = new ReportingXpp3Writer();
+
+ File file = new File( repository.getBasedir(), "report-database.xml" );
+ getLogger().info( "Writing reports to " + file );
+ FileWriter fileWriter = null;
+ try
+ {
+ file.getParentFile().mkdirs();
+
+ fileWriter = new FileWriter( file );
+ writer.write( fileWriter, database.getReporting() );
+ }
+ catch ( IOException e )
+ {
+ throw new ReportingStoreException( e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( fileWriter );
+ }
+ }
+}
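A minimal usage sketch of the store, following the load/report/save pattern used in convertLegacyRepository above (variable names are illustrative):

ReportingDatabase database = reportingStore.getReportsFromStore( repository );
// ... run the artifact and metadata report processors, recording failures and warnings ...
reportingStore.storeReports( database, repository );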
import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Model;
+import java.text.MessageFormat;
import java.util.Iterator;
import java.util.List;
*/
private RepositoryQueryLayerFactory layerFactory;
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
+ public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
{
- RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( repository );
+ RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( artifact.getRepository() );
processArtifact( artifact, reporter, queryLayer );
- List dependencies = model.getDependencies();
- processDependencies( dependencies, reporter, queryLayer );
+ if ( model != null )
+ {
+ List dependencies = model.getDependencies();
+ processDependencies( dependencies, reporter, queryLayer, artifact );
+ }
}
- private void processArtifact( Artifact artifact, ArtifactReporter reporter,
+ private void processArtifact( Artifact artifact, ReportingDatabase reporter,
RepositoryQueryLayer repositoryQueryLayer )
{
- if ( repositoryQueryLayer.containsArtifact( artifact ) )
- {
- reporter.addSuccess( artifact );
- }
- else
+ if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- reporter.addFailure( artifact, ArtifactReporter.ARTIFACT_NOT_FOUND );
+ reporter.addFailure( artifact, "Artifact does not exist in the repository" );
}
}
- private void processDependencies( List dependencies, ArtifactReporter reporter,
- RepositoryQueryLayer repositoryQueryLayer )
+ private void processDependencies( List dependencies, ReportingDatabase reporter,
+ RepositoryQueryLayer repositoryQueryLayer, Artifact sourceArtifact )
{
if ( dependencies.size() > 0 )
{
{
Dependency dependency = (Dependency) iterator.next();
- Artifact artifact = null;
try
{
- artifact = createArtifact( dependency );
+ Artifact artifact = createArtifact( dependency );
- if ( repositoryQueryLayer.containsArtifact( artifact ) )
- {
- reporter.addSuccess( artifact );
- }
- else
+ if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- reporter.addFailure( artifact, ArtifactReporter.DEPENDENCY_NOT_FOUND );
+ String reason = MessageFormat.format(
+ "Artifact''s dependency {0} does not exist in the repository",
+ new String[]{dependency.toString()} );
+ reporter.addFailure( sourceArtifact, reason );
}
}
catch ( InvalidVersionSpecificationException e )
{
- reporter.addFailure( artifact, ArtifactReporter.DEPENDENCY_INVALID_VERSION );
+ String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}",
+ new String[]{dependency.toString(),
+ dependency.getVersion()} );
+ reporter.addFailure( sourceArtifact, reason );
}
}
}
*/
private String indexDirectory;
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
- throws ReportProcessorException
+ public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
{
- if ( artifact.getFile() != null )
+ ArtifactRepository repository = artifact.getRepository();
+ // TODO! always null currently, need to configure this properly
+ if ( artifact.getFile() != null && indexDirectory != null )
{
RepositoryArtifactIndex index = indexFactory.createStandardIndex( new File( indexDirectory ) );
- String checksum;
+ String checksum = null;
try
{
checksum = digester.calc( artifact.getFile() );
}
catch ( DigesterException e )
{
- throw new ReportProcessorException( "Failed to generate checksum", e );
+ reporter.addWarning( artifact, "Unable to generate checksum for " + artifact.getFile() + ": " + e );
}
- try
+ if ( checksum != null )
{
- List results = index.search( new LuceneQuery(
- new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
-
- if ( results.isEmpty() )
- {
- reporter.addSuccess( artifact );
- }
- else
+ try
{
- boolean hasDuplicates = false;
- for ( Iterator i = results.iterator(); i.hasNext(); )
- {
- StandardArtifactIndexRecord result = (StandardArtifactIndexRecord) i.next();
+ List results = index.search( new LuceneQuery(
+ new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
- //make sure it is not the same artifact
- if ( !result.getFilename().equals( repository.pathOf( artifact ) ) )
+ if ( !results.isEmpty() )
+ {
+ for ( Iterator i = results.iterator(); i.hasNext(); )
{
- //report only duplicates from the same groupId
- String groupId = artifact.getGroupId();
- if ( groupId.equals( result.getGroupId() ) )
+ StandardArtifactIndexRecord result = (StandardArtifactIndexRecord) i.next();
+
+ //make sure it is not the same artifact
+ if ( !result.getFilename().equals( repository.pathOf( artifact ) ) )
{
- hasDuplicates = true;
- reporter.addFailure( artifact, "Found duplicate for " + artifact.getId() );
+ //report only duplicates from the same groupId
+ String groupId = artifact.getGroupId();
+ if ( groupId.equals( result.getGroupId() ) )
+ {
+ reporter.addFailure( artifact, "Found duplicate for " + artifact.getId() );
+ }
}
}
}
-
- if ( !hasDuplicates )
- {
- reporter.addSuccess( artifact );
- }
}
- }
- catch ( RepositoryIndexSearchException e )
- {
- throw new ReportProcessorException( "Failed to search in index", e );
+ catch ( RepositoryIndexSearchException e )
+ {
+ reporter.addWarning( artifact, "Failed to search in index: " + e );
+ }
}
}
else
implements ArtifactReportProcessor
{
/**
- * @param model
- * @param artifact The pom xml file to be validated, passed as an artifact object.
- * @param reporter The artifact reporter object.
- * @param repository the repository where the artifact is located.
+ * @param artifact The pom xml file to be validated, passed as an artifact object.
+ * @param reporter The reporting database to receive processing results.
*/
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
+ public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
{
+ ArtifactRepository repository = artifact.getRepository();
+
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
{
reader = new FileReader( f );
pomReader.read( reader );
- reporter.addSuccess( artifact );
}
catch ( XmlPullParserException e )
{
reporter.addWarning( artifact, "The artifact is not a pom xml file." );
}
}
-
}
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.apache.maven.project.MavenProjectBuilder;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
*/
private ArtifactFactory artifactFactory;
+ // TODO: share with other code with the same
+ private static final Set JAR_FILE_TYPES =
+ new HashSet( Arrays.asList( new String[]{"jar", "war", "par", "ejb", "ear", "rar", "sar"} ) );
+
+ /**
+ * @plexus.requirement
+ */
+ private MavenProjectBuilder projectBuilder;
+
/**
* Check whether the artifact is in its proper location. The location of the artifact
* is validated first against the groupId, artifactId and version in the specified model
* included in the package. If a model exists inside the package, then check if the artifact's
* location is valid based on the location specified in the pom. Check whether the location
* specified in the file system pom and the one in the pom included in the package are the same.
- *
- * @param model Represents the pom in the file system.
- * @param artifact
- * @param reporter
- * @param repository
*/
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
- throws ReportProcessorException
+ public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
{
+ ArtifactRepository repository = artifact.getRepository();
+
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
"Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
}
- //check if the artifact is located in its proper location based on the info
- //specified in the model object/pom
- Artifact modelArtifact = artifactFactory.createBuildArtifact( model.getGroupId(), model.getArtifactId(),
- model.getVersion(), model.getPackaging() );
-
- boolean failed = false;
- String modelPath = repository.pathOf( modelArtifact );
String artifactPath = repository.pathOf( artifact );
- if ( modelPath.equals( artifactPath ) )
+
+ if ( model != null )
{
- //get the location of the artifact itself
- File file = new File( repository.getBasedir(), artifactPath );
+ //check if the artifact is located in its proper location based on the info
+ //specified in the model object/pom
+ Artifact modelArtifact = artifactFactory.createBuildArtifact( model.getGroupId(), model.getArtifactId(),
+ model.getVersion(), model.getPackaging() );
+
+ String modelPath = repository.pathOf( modelArtifact );
+ if ( !modelPath.equals( artifactPath ) )
+ {
+ reporter.addFailure( artifact,
+ "The artifact is out of place. It does not match the specified location in the repository pom." );
+ }
+ }
+
+ //get the location of the artifact itself
+ File file = new File( repository.getBasedir(), artifactPath );
- if ( file.exists() )
+ if ( file.exists() )
+ {
+ if ( JAR_FILE_TYPES.contains( artifact.getType() ) )
{
//unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
//check if the pom is included in the package
- Model extractedModel = readArtifactModel( file, artifact.getGroupId(), artifact.getArtifactId() );
+ Model extractedModel = readArtifactModel( file, artifact, reporter );
if ( extractedModel != null )
{
{
reporter.addFailure( artifact,
"The artifact is out of place. It does not match the specified location in the packaged pom." );
- failed = true;
}
}
}
- else
- {
- reporter.addFailure( artifact,
- "The artifact is out of place. It does not exist at the specified location in the repository pom." );
- failed = true;
- }
}
else
{
reporter.addFailure( artifact,
- "The artifact is out of place. It does not match the specified location in the repository pom." );
- failed = true;
- }
-
- if ( !failed )
- {
- reporter.addSuccess( artifact );
+ "The artifact is out of place. It does not exist at the specified location in the repository pom." );
}
}
- /**
- * Extract the contents of the artifact/jar file.
- *
- * @param file
- * @param groupId
- * @param artifactId
- */
- private Model readArtifactModel( File file, String groupId, String artifactId )
- throws ReportProcessorException
+ private Model readArtifactModel( File file, Artifact artifact, ReportingDatabase reporter )
{
Model model = null;
jar = new JarFile( file );
//Get the entry and its input stream.
- JarEntry entry = jar.getJarEntry( "META-INF/maven/" + groupId + "/" + artifactId + "/pom.xml" );
+ JarEntry entry = jar.getJarEntry(
+ "META-INF/maven/" + artifact.getGroupId() + "/" + artifact.getArtifactId() + "/pom.xml" );
// If the entry is not null, extract it.
if ( entry != null )
{
model = readModel( jar.getInputStream( entry ) );
+
+ if ( model.getGroupId() == null )
+ {
+ model.setGroupId( model.getParent().getGroupId() );
+ }
+ if ( model.getVersion() == null )
+ {
+ model.setVersion( model.getParent().getVersion() );
+ }
}
}
catch ( IOException e )
{
- // TODO: should just warn and continue!
- throw new ReportProcessorException( "Unable to read artifact to extract model", e );
+ reporter.addWarning( artifact, "Unable to read artifact to extract model: " + e );
}
catch ( XmlPullParserException e )
{
- // TODO: should just warn and continue!
- throw new ReportProcessorException( "Unable to read artifact to extract model", e );
+ reporter.addWarning( artifact, "Unable to parse extracted model: " + e );
}
finally
{
{
String ROLE = MetadataReportProcessor.class.getName();
- void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
- throws ReportProcessorException;
+ void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ReportingDatabase reporter );
}
+++ /dev/null
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Exception occurring during reporting.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- */
-public class ReportProcessorException
- extends Exception
-{
- public ReportProcessorException( String msg, Throwable cause )
- {
- super( msg, cause );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.Reporting;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * @todo i18n, including message formatting and parameterisation
+ */
+public class ReportingDatabase
+{
+ private final Reporting reporting;
+
+ private Map artifactMap;
+
+ private Map metadataMap;
+
+ private int totalFailures;
+
+ private int totalWarnings;
+
+ public ReportingDatabase()
+ {
+ reporting = new Reporting();
+ }
+
+ public ReportingDatabase( Reporting reporting )
+ {
+ this.reporting = reporting;
+ }
+
+ public void addFailure( Artifact artifact, String reason )
+ {
+ ArtifactResults results = getArtifactResults( artifact );
+ results.addFailure( createResults( reason ) );
+ totalFailures++;
+ }
+
+ public void addWarning( Artifact artifact, String reason )
+ {
+ ArtifactResults results = getArtifactResults( artifact );
+ results.addWarning( createResults( reason ) );
+ totalWarnings++;
+ }
+
+ private ArtifactResults getArtifactResults( Artifact artifact )
+ {
+ Map artifactMap = getArtifactMap();
+
+ String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
+ artifact.getType(), artifact.getClassifier() );
+ ArtifactResults results = (ArtifactResults) artifactMap.get( key );
+ if ( results == null )
+ {
+ results = new ArtifactResults();
+ results.setArtifactId( artifact.getArtifactId() );
+ results.setClassifier( artifact.getClassifier() );
+ results.setGroupId( artifact.getGroupId() );
+ results.setType( artifact.getType() );
+ results.setVersion( artifact.getVersion() );
+
+ artifactMap.put( key, results );
+ reporting.getArtifacts().add( results );
+ }
+
+ return results;
+ }
+
+ private Map getArtifactMap()
+ {
+ if ( artifactMap == null )
+ {
+ Map map = new HashMap();
+ for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
+ {
+ ArtifactResults result = (ArtifactResults) i.next();
+
+ String key = getArtifactKey( result.getGroupId(), result.getArtifactId(), result.getVersion(),
+ result.getType(), result.getClassifier() );
+ map.put( key, result );
+
+ totalFailures += result.getFailures().size();
+ totalWarnings += result.getWarnings().size();
+ }
+ artifactMap = map;
+ }
+ return artifactMap;
+ }
+
+ private static String getArtifactKey( String groupId, String artifactId, String version, String type,
+ String classifier )
+ {
+ return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
+ }
+
+ private static Result createResults( String reason )
+ {
+ Result result = new Result();
+ result.setReason( reason );
+ return result;
+ }
+
+ public void addFailure( RepositoryMetadata metadata, String reason )
+ {
+ MetadataResults results = getMetadataResults( metadata );
+ results.addFailure( createResults( reason ) );
+ totalFailures++;
+ }
+
+ public void addWarning( RepositoryMetadata metadata, String reason )
+ {
+ MetadataResults results = getMetadataResults( metadata );
+ results.addWarning( createResults( reason ) );
+ totalWarnings++;
+ }
+
+ private MetadataResults getMetadataResults( RepositoryMetadata metadata )
+ {
+ Map metadataMap = getMetadataMap();
+
+ String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
+
+ MetadataResults results = (MetadataResults) metadataMap.get( key );
+ if ( results == null )
+ {
+ results = new MetadataResults();
+ results.setArtifactId( metadata.getArtifactId() );
+ results.setGroupId( metadata.getGroupId() );
+ results.setVersion( metadata.getBaseVersion() );
+
+ metadataMap.put( key, results );
+ reporting.getMetadata().add( results );
+ }
+
+ return results;
+ }
+
+ private Map getMetadataMap()
+ {
+ if ( metadataMap == null )
+ {
+ Map map = new HashMap();
+ for ( Iterator i = reporting.getMetadata().iterator(); i.hasNext(); )
+ {
+ MetadataResults result = (MetadataResults) i.next();
+
+ String key = getMetadataKey( result.getGroupId(), result.getArtifactId(), result.getVersion() );
+
+ map.put( key, result );
+
+ totalFailures += result.getFailures().size();
+ totalWarnings += result.getWarnings().size();
+ }
+ metadataMap = map;
+ }
+ return metadataMap;
+ }
+
+ private static String getMetadataKey( String groupId, String artifactId, String version )
+ {
+ return groupId + ":" + artifactId + ":" + version;
+ }
+
+ public int getNumFailures()
+ {
+ return totalFailures;
+ }
+
+ public int getNumWarnings()
+ {
+ return totalWarnings;
+ }
+
+ public Reporting getReporting()
+ {
+ return reporting;
+ }
+
+ public Iterator getArtifactIterator()
+ {
+ return reporting.getArtifacts().iterator();
+ }
+
+ public Iterator getMetadataIterator()
+ {
+ return reporting.getMetadata().iterator();
+ }
+}
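
For orientation (illustration only, not part of the patch): the sketch below shows how the reworked reporting API is intended to be consumed. It uses only methods defined in the ReportingDatabase class above and the model classes it imports; the Artifact argument is assumed to come from the usual ArtifactFactory lookup.

    // Failures and warnings recorded against the same artifact coordinates are grouped
    // under a single ArtifactResults entry, so iterating the database yields one entry
    // per artifact rather than one entry per message.
    public void printFailures( ReportingDatabase database, Artifact artifact )
    {
        database.addFailure( artifact, "The artifact is out of place." );
        database.addWarning( artifact, "Unable to read artifact to extract model." );

        for ( Iterator i = database.getArtifactIterator(); i.hasNext(); )
        {
            ArtifactResults results = (ArtifactResults) i.next();
            for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
            {
                Result result = (Result) j.next();
                System.out.println( result.getReason() );
            }
        }
    }
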
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+/**
+ * A component for loading the reporting database into the model.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo this is something that could possibly be generalised into Modello.
+ */
+public interface ReportingStore
+{
+ /**
+ * The Plexus role for the component.
+ */
+ String ROLE = ReportingStore.class.getName();
+
+ /**
+ * Get the reports from the store. A cached version may be used.
+ *
+ * @param repository the repository to load the reports for
+ * @return the reporting database
+ * @throws ReportingStoreException if there was a problem reading the store
+ */
+ ReportingDatabase getReportsFromStore( ArtifactRepository repository )
+ throws ReportingStoreException;
+
+ /**
+ * Save the reporting to the store.
+ *
+ * @param database the reports to store
+ * @param repository the repositorry to store the reports in
+ * @throws ReportingStoreException if there was a problem writing the store
+ */
+ void storeReports( ReportingDatabase database, ArtifactRepository repository )
+ throws ReportingStoreException;
+}
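
A hypothetical round trip (not part of the patch) showing how the pieces introduced in this changeset fit together: the store hands out the repository's ReportingDatabase, report processors append to it, and the caller persists it again. The orchestration itself is an assumption; only the method signatures are taken from this diff.

    public void runReport( ReportingStore store, ArtifactReportProcessor processor,
                           ArtifactRepository repository, Artifact artifact, Model model )
        throws ReportingStoreException
    {
        // Load the (possibly cached) reporting database for this repository.
        ReportingDatabase database = store.getReportsFromStore( repository );

        // Processors now record failures and warnings directly in the database
        // instead of throwing ReportProcessorException.
        processor.processArtifact( artifact, model, database );

        // Persist the accumulated results back to the repository's store.
        store.storeReports( database, repository );
    }
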
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception occurring using the reporting store.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class ReportingStoreException
+ extends Exception
+{
+ public ReportingStoreException( String message )
+ {
+ super( message );
+ }
+
+ public ReportingStoreException( String message, Throwable e )
+ {
+ super( message, e );
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * A result of the report for a given artifact being processed.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:RepositoryMetadataResult.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class RepositoryMetadataResult
- implements Result
-{
- private final RepositoryMetadata metadata;
-
- private final String reason;
-
- public RepositoryMetadataResult( RepositoryMetadata metadata )
- {
- this.metadata = metadata;
- this.reason = null;
- }
-
- public RepositoryMetadataResult( RepositoryMetadata metadata, String reason )
- {
- this.metadata = metadata;
- this.reason = reason;
- }
-
- public RepositoryMetadata getMetadata()
- {
- return metadata;
- }
-
- public String getReason()
- {
- return reason;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * A result record during the reporting.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface Result
-{
- /**
- * The reason given for the result.
- *
- * @return the message
- */
- String getReason();
-}
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<model>
+ <id>reporting</id>
+ <name>Reporting</name>
+ <description>
+ Storage database for reporting results in the repository.
+ </description>
+ <defaults>
+ <default>
+ <key>package</key>
+ <value>org.apache.maven.archiva.reporting.model</value>
+ </default>
+ </defaults>
+ <classes>
+ <class rootElement="true" xml.tagName="reporting">
+ <name>Reporting</name>
+ <version>1.0.0</version>
+ <fields>
+ <field>
+ <name>artifacts</name>
+ <version>1.0.0</version>
+ <association>
+ <type>ArtifactResults</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field>
+ <name>metadata</name>
+ <version>1.0.0</version>
+ <association>
+ <type>MetadataResults</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ </fields>
+ </class>
+ <class>
+ <name>ArtifactResults</name>
+ <version>1.0.0</version>
+ <fields>
+ <field>
+ <name>failures</name>
+ <version>1.0.0</version>
+ <association>
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field>
+ <name>warnings</name>
+ <version>1.0.0</version>
+ <association>
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field xml.attribute="true">
+ <name>groupId</name>
+ <identity>true</identity>
+ <version>1.0.0</version>
+ <type>String</type>
+ <required>true</required>
+ <description>
+ The group ID of the artifact in the result.
+ </description>
+ </field>
+ <field xml.attribute="true">
+ <name>artifactId</name>
+ <version>1.0.0</version>
+ <identity>true</identity>
+ <type>String</type>
+ <required>true</required>
+ <description>
+ The artifact ID of the artifact in the result.
+ </description>
+ </field>
+ <field xml.attribute="true">
+ <name>version</name>
+ <version>1.0.0</version>
+ <identity>true</identity>
+ <type>String</type>
+ <required>true</required>
+ <description>
+ The version of the artifact in the result.
+ </description>
+ </field>
+ <field xml.attribute="true">
+ <name>type</name>
+ <version>1.0.0</version>
+ <type>String</type>
+ <identity>true</identity>
+ <required>true</required>
+ <description>
+ The type of the artifact in the result.
+ </description>
+ </field>
+ <field xml.attribute="true">
+ <name>classifier</name>
+ <version>1.0.0</version>
+ <type>String</type>
+ <identity>true</identity>
+ <description>
+ The classifier of the artifact in the result.
+ </description>
+ </field>
+ </fields>
+ </class>
+ <class>
+ <name>MetadataResults</name>
+ <version>1.0.0</version>
+ <fields>
+ <field>
+ <name>failures</name>
+ <version>1.0.0</version>
+ <association>
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field>
+ <name>warnings</name>
+ <version>1.0.0</version>
+ <association>
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field xml.attribute="true">
+ <name>groupId</name>
+ <version>1.0.0</version>
+ <type>String</type>
+ <required>true</required>
+ <identity>true</identity>
+ <description>
+ The group ID of the metadata in the result.
+ </description>
+ </field>
+ <field xml.attribute="true">
+ <name>artifactId</name>
+ <version>1.0.0</version>
+ <type>String</type>
+ <identity>true</identity>
+ <description>
+ The artifact ID of the metadata in the result.
+ </description>
+ </field>
+ <field xml.attribute="true">
+ <name>version</name>
+ <version>1.0.0</version>
+ <type>String</type>
+ <identity>true</identity>
+ <description>
+ The version of the metadata in the result.
+ </description>
+ </field>
+ </fields>
+ </class>
+ <class>
+ <name>Result</name>
+ <version>1.0.0</version>
+ <fields>
+ <field xml.attribute="true">
+ <name>reason</name>
+ <version>1.0.0</version>
+ <type>String</type>
+ <description>
+ The reason given for the result.
+ </description>
+ </field>
+ </fields>
+ </class>
+ </classes>
+</model>
+
+
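
For reference (illustration only): the Modello descriptor above is the source of the org.apache.maven.archiva.reporting.model classes imported elsewhere in this changeset. A short sketch of populating them directly, mirroring what ReportingDatabase does internally and using only accessors the diff itself relies on:

    private Reporting buildSampleReporting()
    {
        Reporting reporting = new Reporting();

        // One ArtifactResults entry per artifact, keyed by its coordinates.
        ArtifactResults artifactResults = new ArtifactResults();
        artifactResults.setGroupId( "groupId" );
        artifactResults.setArtifactId( "artifactId" );
        artifactResults.setVersion( "1.0-alpha-1" );
        artifactResults.setType( "jar" );

        // Each failure or warning is a Result carrying only its reason.
        Result failure = new Result();
        failure.setReason( "The artifact is out of place." );
        artifactResults.addFailure( failure );

        reporting.getArtifacts().add( artifactResults );
        return reporting;
    }
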
* limitations under the License.
*/
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import java.io.File;
/**
- *
+ *
*/
public abstract class AbstractRepositoryReportsTestCase
extends PlexusTestCase
*/
protected ArtifactRepository repository;
+ private ArtifactFactory artifactFactory;
+
protected void setUp()
throws Exception
{
repository = factory.createArtifactRepository( "repository", repositoryDirectory.toURL().toString(), layout,
null, null );
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+ }
+
+ protected Artifact createArtifact( String groupId, String artifactId, String version )
+ {
+ return createArtifact( groupId, artifactId, version, "jar" );
+ }
+
+ protected Artifact createArtifact( String groupId, String artifactId, String version, String type )
+ {
+ Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, type );
+ artifact.setRepository( repository );
+ return artifact;
}
}
* limitations under the License.
*/
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.apache.maven.model.Model;
import java.util.Iterator;
public class ArtifactReporterTest
extends AbstractRepositoryReportsTestCase
{
- private ArtifactReporter reporter;
+ private ReportingDatabase reporter;
private Artifact artifact;
- private Model model;
-
protected void setUp()
throws Exception
{
super.setUp();
- reporter = (ArtifactReporter) lookup( ArtifactReporter.ROLE );
+ reporter = new ReportingDatabase();
ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
Versioning versioning = new Versioning();
versioning.addVersion( "1.0-alpha-1" );
versioning.setLastUpdated( "20050611.202020" );
- model = new Model();
}
- public void testArtifactReporterSingleSuccess()
+ public void testArtifactReporterSingleFailure()
{
- reporter.addSuccess( artifact );
-
- assertEquals( 1, reporter.getNumSuccesses() );
+ reporter.addFailure( artifact, "failed once" );
- Iterator success = reporter.getArtifactSuccessIterator();
- assertTrue( success.hasNext() );
- Artifact result = ( (ArtifactResult) success.next() ).getArtifact();
- assertEquals( "groupId", result.getGroupId() );
- assertEquals( "artifactId", result.getArtifactId() );
- assertEquals( "1.0-alpha-1", result.getVersion() );
- assertFalse( success.hasNext() );
- }
+ Iterator artifactIterator = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) artifactIterator.next();
+ assertFalse( artifactIterator.hasNext() );
- public void testArtifactReporterMultipleSuccess()
- {
- reporter.addSuccess( artifact );
- reporter.addSuccess( artifact );
- reporter.addSuccess( artifact );
- Iterator success = reporter.getArtifactSuccessIterator();
- assertTrue( success.hasNext() );
- int i;
- for ( i = 0; success.hasNext(); i++ )
+ int count = 0;
+ for ( Iterator i = results.getFailures().iterator(); i.hasNext(); count++ )
{
- success.next();
+ i.next();
}
- assertEquals( 3, i );
- assertEquals( 3, reporter.getNumSuccesses() );
- assertEquals( 0, reporter.getNumFailures() );
- assertEquals( 0, reporter.getNumWarnings() );
- }
-
- public void testArtifactReporterSingleFailure()
- {
- reporter.addFailure( artifact, "failed once" );
- Iterator failure = reporter.getArtifactFailureIterator();
- assertTrue( failure.hasNext() );
- failure.next();
- assertFalse( failure.hasNext() );
- assertEquals( 0, reporter.getNumSuccesses() );
+ assertEquals( 1, count );
assertEquals( 1, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
}
reporter.addFailure( artifact, "failed once" );
reporter.addFailure( artifact, "failed twice" );
reporter.addFailure( artifact, "failed thrice" );
- Iterator failure = reporter.getArtifactFailureIterator();
- assertTrue( failure.hasNext() );
- int i;
- for ( i = 0; failure.hasNext(); i++ )
+
+ Iterator artifactIterator = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) artifactIterator.next();
+ assertFalse( artifactIterator.hasNext() );
+
+ int count = 0;
+ for ( Iterator i = results.getFailures().iterator(); i.hasNext(); count++ )
{
- failure.next();
+ i.next();
}
- assertEquals( 3, i );
- assertEquals( 0, reporter.getNumSuccesses() );
+ assertEquals( 3, count );
assertEquals( 3, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
}
reporter.addFailure( artifact, "failed once" );
reporter.addFailure( artifact, "failed twice" );
reporter.addFailure( artifact, "failed thrice" );
- Iterator failure = reporter.getArtifactFailureIterator();
- assertEquals( "failed once", ( (ArtifactResult) failure.next() ).getReason() );
- assertEquals( "failed twice", ( (ArtifactResult) failure.next() ).getReason() );
- assertEquals( "failed thrice", ( (ArtifactResult) failure.next() ).getReason() );
+ Iterator artifactIterator = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) artifactIterator.next();
+ assertFalse( artifactIterator.hasNext() );
+ Iterator failure = results.getFailures().iterator();
+ assertEquals( "failed once", ( (Result) failure.next() ).getReason() );
+ assertEquals( "failed twice", ( (Result) failure.next() ).getReason() );
+ assertEquals( "failed thrice", ( (Result) failure.next() ).getReason() );
}
public void testArtifactReporterSingleWarning()
{
reporter.addWarning( artifact, "you've been warned" );
- Iterator warning = reporter.getArtifactWarningIterator();
- assertTrue( warning.hasNext() );
- warning.next();
- assertFalse( warning.hasNext() );
- assertEquals( 0, reporter.getNumSuccesses() );
+ Iterator artifactIterator = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) artifactIterator.next();
+ assertFalse( artifactIterator.hasNext() );
+
+ int count = 0;
+ for ( Iterator i = results.getWarnings().iterator(); i.hasNext(); count++ )
+ {
+ i.next();
+ }
+ assertEquals( 1, count );
assertEquals( 0, reporter.getNumFailures() );
assertEquals( 1, reporter.getNumWarnings() );
}
reporter.addWarning( artifact, "you have to stop now" );
reporter.addWarning( artifact, "all right... that does it!" );
- Iterator warning = reporter.getArtifactWarningIterator();
- assertTrue( warning.hasNext() );
- int i;
- for ( i = 0; warning.hasNext(); i++ )
+ Iterator artifactIterator = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) artifactIterator.next();
+ assertFalse( artifactIterator.hasNext() );
+
+ int count = 0;
+ for ( Iterator i = results.getWarnings().iterator(); i.hasNext(); count++ )
{
- warning.next();
+ i.next();
}
- assertEquals( 3, i );
- assertEquals( 0, reporter.getNumSuccesses() );
+ assertEquals( 3, count );
assertEquals( 0, reporter.getNumFailures() );
assertEquals( 3, reporter.getNumWarnings() );
}
reporter.addWarning( artifact, "you have to stop now" );
reporter.addWarning( artifact, "all right... that does it!" );
- Iterator warning = reporter.getArtifactWarningIterator();
- assertEquals( "i'm warning you", ( (ArtifactResult) warning.next() ).getReason() );
- assertEquals( "you have to stop now", ( (ArtifactResult) warning.next() ).getReason() );
- assertEquals( "all right... that does it!", ( (ArtifactResult) warning.next() ).getReason() );
+ Iterator artifactIterator = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) artifactIterator.next();
+ assertFalse( artifactIterator.hasNext() );
+ Iterator warning = results.getWarnings().iterator();
+ assertEquals( "i'm warning you", ( (Result) warning.next() ).getReason() );
+ assertEquals( "you have to stop now", ( (Result) warning.next() ).getReason() );
+ assertEquals( "all right... that does it!", ( (Result) warning.next() ).getReason() );
}
}
* limitations under the License.
*/
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
private MetadataReportProcessor badMetadataReportProcessor;
- private ArtifactReporter reporter = new DefaultArtifactReporter();
+ private ReportingDatabase reporter = new ReportingDatabase();
protected void setUp()
throws Exception
}
public void testMetadataMissingLastUpdated()
- throws ReportProcessorException
{
Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ Result result = (Result) failures.next();
+ assertMetadata( metadata, results );
assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
public void testMetadataValidVersions()
- throws ReportProcessorException
{
Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
public void testMetadataMissingADirectory()
- throws ReportProcessorException
{
Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
+ Result result = (Result) failures.next();
+ assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
"Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
}
public void testMetadataInvalidArtifactVersion()
- throws ReportProcessorException
{
Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
+ Result result = (Result) failures.next();
+ assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
"Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
}
public void testMoreThanOneMetadataVersionErrors()
- throws ReportProcessorException
{
Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ Result result = (Result) failures.next();
+ assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
"Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
result.getReason() );
assertTrue( "check there is a 2nd failure", failures.hasNext() );
- result = (RepositoryMetadataResult) failures.next();
+ result = (Result) failures.next();
// TODO: should be more robust
assertEquals( "check reason",
"Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
}
public void testValidPluginMetadata()
- throws ReportProcessorException
{
RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
public void testMissingMetadataPlugin()
- throws ReportProcessorException
{
RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
}
public void testIncompletePluginMetadata()
- throws ReportProcessorException
{
RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
}
public void testInvalidPluginArtifactId()
- throws ReportProcessorException
{
RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Missing or empty artifactId in group metadata.", result.getReason() );
+ assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3",
+ result.getReason() );
assertTrue( "check there is a 2nd failure", failures.hasNext() );
result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Missing or empty artifactId in group metadata.", result.getReason() );
+ assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4",
+ result.getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
public void testInvalidPluginPrefix()
- throws ReportProcessorException
{
RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
}
public void testDuplicatePluginPrefixes()
- throws ReportProcessorException
{
RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
}
public void testValidSnapshotMetadata()
- throws ReportProcessorException
{
Artifact artifact =
artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
public void testInvalidSnapshotMetadata()
- throws ReportProcessorException
{
Artifact artifact =
artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ Iterator failures = reporter.getMetadataIterator();
assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ Result result = (Result) failures.next();
+ assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.",
result.getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
+ private static void assertMetadata( RepositoryMetadata metadata, MetadataResults results )
+ {
+ assertEquals( "check metadata", metadata.getGroupId(), results.getGroupId() );
+ assertEquals( "check metadata", metadata.getArtifactId(), results.getArtifactId() );
+ assertEquals( "check metadata", metadata.getBaseVersion(), results.getVersion() );
+ }
+
private Plugin createMetadataPlugin( String artifactId, String prefix )
{
Plugin plugin = new Plugin();
*/
import org.apache.maven.archiva.digest.DigesterException;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.handler.ArtifactHandler;
-import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.versioning.VersionRange;
import java.io.File;
import java.io.IOException;
{
private ArtifactReportProcessor artifactReportProcessor;
- private ArtifactReporter reporter = new DefaultArtifactReporter();
+ private ReportingDatabase reporter = new ReportingDatabase();
private MetadataReportProcessor metadataReportProcessor;
* Test the ChecksumArtifactReportProcessor when the checksum files are valid.
*/
public void testChecksumArtifactReporterSuccess()
- throws ReportProcessorException, IOException, DigesterException
+ throws DigesterException, IOException
{
createChecksumFile( "VALID" );
createChecksumFile( "INVALID" );
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
+ Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 2, reporter.getNumSuccesses() );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
+ assertEquals( 0, reporter.getNumFailures() );
+ assertEquals( 0, reporter.getNumWarnings() );
}
/**
* Test the ChecksumArtifactReportProcessor when the checksum files are invalid.
*/
public void testChecksumArtifactReporterFailed()
- throws ReportProcessorException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "invalidArtifact", version, "compile", "jar", "", handler );
+ Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 2, reporter.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
+ assertEquals( 1, reporter.getNumFailures() );
+ assertEquals( 0, reporter.getNumWarnings() );
}
/**
* The reporter should report two successful validations.
*/
public void testChecksumMetadataReporterSuccess()
- throws ReportProcessorException, DigesterException, IOException
+ throws DigesterException, IOException
{
createMetadataFile( "VALID" );
createMetadataFile( "INVALID" );
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
+ Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
//Version level metadata
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
//Group level metadata
metadata = new GroupRepositoryMetadata( "checksumTest" );
metadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator iter = reporter.getRepositoryMetadataSuccessIterator();
- assertTrue( "check if there is a success", iter.hasNext() );
}
/**
* The reporter must report 2 failures.
*/
public void testChecksumMetadataReporterFailure()
- throws ReportProcessorException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "invalidArtifact", version, "compile", "jar", "", handler );
+ Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
metadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator iter = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check if there is a failure", iter.hasNext() );
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
}
/**
* Test the conditional when the checksum files of the artifact & metadata do not exist.
*/
public void testChecksumFilesDoNotExist()
- throws ReportProcessorException, DigesterException, IOException
+ throws DigesterException, IOException
{
createChecksumFile( "VALID" );
createMetadataFile( "VALID" );
deleteChecksumFiles( "jar" );
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
+ Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 2, reporter.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
+ assertEquals( 1, reporter.getNumFailures() );
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
metadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator iter = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check if there is a failure", iter.hasNext() );
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
}
* limitations under the License.
*/
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
public class DefaultArtifactReporterTest
extends AbstractRepositoryReportsTestCase
{
- private ArtifactReporter reporter;
-
- private Artifact artifact;
+ private ReportingDatabase reporter;
private RepositoryMetadata metadata;
{
assertEquals( "No failures", 0, reporter.getNumFailures() );
assertEquals( "No warnings", 0, reporter.getNumWarnings() );
- assertEquals( "No successes", 0, reporter.getNumSuccesses() );
- assertFalse( "No artifact failures", reporter.getArtifactFailureIterator().hasNext() );
- assertFalse( "No artifact warnings", reporter.getArtifactWarningIterator().hasNext() );
- assertFalse( "No artifact successes", reporter.getArtifactSuccessIterator().hasNext() );
- assertFalse( "No metadata failures", reporter.getRepositoryMetadataFailureIterator().hasNext() );
- assertFalse( "No metadata warnings", reporter.getRepositoryMetadataWarningIterator().hasNext() );
- assertFalse( "No metadata successes", reporter.getRepositoryMetadataSuccessIterator().hasNext() );
+ assertFalse( "No artifact failures", reporter.getArtifactIterator().hasNext() );
+ assertFalse( "No metadata failures", reporter.getMetadataIterator().hasNext() );
}
public void testMetadataSingleFailure()
reporter.addFailure( metadata, "Single Failure Reason" );
assertEquals( "failures count", 1, reporter.getNumFailures() );
assertEquals( "warnings count", 0, reporter.getNumWarnings() );
- assertEquals( "successes count", 0, reporter.getNumSuccesses() );
- Iterator results = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "must have failures", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ Result result = (Result) failures.next();
+ assertMetadata( results );
assertEquals( "check failure reason", "Single Failure Reason", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
+ assertFalse( "no more failures", failures.hasNext() );
+ }
+
+ private void assertMetadata( MetadataResults result )
+ {
+ assertEquals( "check failure cause", metadata.getGroupId(), result.getGroupId() );
+ assertEquals( "check failure cause", metadata.getArtifactId(), result.getArtifactId() );
+ assertEquals( "check failure cause", metadata.getBaseVersion(), result.getVersion() );
}
public void testMetadataMultipleFailures()
reporter.addFailure( metadata, "Second Failure Reason" );
assertEquals( "failures count", 2, reporter.getNumFailures() );
assertEquals( "warnings count", 0, reporter.getNumWarnings() );
- assertEquals( "successes count", 0, reporter.getNumSuccesses() );
- Iterator results = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "must have failures", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ Result result = (Result) failures.next();
+ assertMetadata( results );
assertEquals( "check failure reason", "First Failure Reason", result.getReason() );
- assertTrue( "must have 2nd failure", results.hasNext() );
- result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
+ assertTrue( "must have 2nd failure", failures.hasNext() );
+ result = (Result) failures.next();
assertEquals( "check failure reason", "Second Failure Reason", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
+ assertFalse( "no more failures", failures.hasNext() );
}
public void testMetadataSingleWarning()
{
reporter.addWarning( metadata, "Single Warning Message" );
- assertEquals( "failures count", 0, reporter.getNumFailures() );
+ assertEquals( "warnings count", 0, reporter.getNumFailures() );
assertEquals( "warnings count", 1, reporter.getNumWarnings() );
- assertEquals( "successes count", 0, reporter.getNumSuccesses() );
- Iterator results = reporter.getRepositoryMetadataWarningIterator();
- assertTrue( "must have failures", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
+ Iterator warnings = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", warnings.hasNext() );
+ MetadataResults results = (MetadataResults) warnings.next();
+ warnings = results.getWarnings().iterator();
+ assertTrue( "check there is a failure", warnings.hasNext() );
+ Result result = (Result) warnings.next();
+ assertMetadata( results );
assertEquals( "check failure reason", "Single Warning Message", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
+ assertFalse( "no more warnings", warnings.hasNext() );
}
public void testMetadataMultipleWarnings()
{
reporter.addWarning( metadata, "First Warning" );
reporter.addWarning( metadata, "Second Warning" );
- assertEquals( "failures count", 0, reporter.getNumFailures() );
+ assertEquals( "warnings count", 0, reporter.getNumFailures() );
assertEquals( "warnings count", 2, reporter.getNumWarnings() );
- assertEquals( "successes count", 0, reporter.getNumSuccesses() );
- Iterator results = reporter.getRepositoryMetadataWarningIterator();
- assertTrue( "must have warnings", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
+ Iterator warnings = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", warnings.hasNext() );
+ MetadataResults results = (MetadataResults) warnings.next();
+ warnings = results.getWarnings().iterator();
+ assertTrue( "check there is a failure", warnings.hasNext() );
+ Result result = (Result) warnings.next();
+ assertMetadata( results );
assertEquals( "check failure reason", "First Warning", result.getReason() );
- assertTrue( "must have 2nd warning", results.hasNext() );
- result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
+ assertTrue( "must have 2nd warning", warnings.hasNext() );
+ result = (Result) warnings.next();
assertEquals( "check failure reason", "Second Warning", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
- }
-
- public void testMetadataSingleSuccess()
- {
- reporter.addSuccess( metadata );
- assertEquals( "failures count", 0, reporter.getNumFailures() );
- assertEquals( "warnings count", 0, reporter.getNumWarnings() );
- assertEquals( "successes count", 1, reporter.getNumSuccesses() );
-
- Iterator results = reporter.getRepositoryMetadataSuccessIterator();
- assertTrue( "must have successes", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check success metadata", metadata, result.getMetadata() );
- assertNull( "check no reason", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
- }
-
- public void testMetadataMultipleSuccesses()
- {
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-beta-1" );
- versioning.addVersion( "1.0-beta-2" );
- RepositoryMetadata metadata2 = new ArtifactRepositoryMetadata( artifact, versioning );
-
- reporter.addSuccess( metadata );
- reporter.addSuccess( metadata2 );
- assertEquals( "failures count", 0, reporter.getNumFailures() );
- assertEquals( "warnings count", 0, reporter.getNumWarnings() );
- assertEquals( "successes count", 2, reporter.getNumSuccesses() );
-
- Iterator results = reporter.getRepositoryMetadataSuccessIterator();
- assertTrue( "must have successes", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check success metadata", metadata, result.getMetadata() );
- assertNull( "check no reason", result.getReason() );
- assertTrue( "must have 2nd success", results.hasNext() );
- result = (RepositoryMetadataResult) results.next();
- assertEquals( "check success metadata", metadata2, result.getMetadata() );
- assertNull( "check no reason", result.getReason() );
- assertFalse( "no more successes", results.hasNext() );
+ assertFalse( "no more warnings", warnings.hasNext() );
}
protected void setUp()
{
super.setUp();
- reporter = new DefaultArtifactReporter();
+ reporter = new ReportingDatabase();
ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
- artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
Versioning versioning = new Versioning();
versioning.addVersion( "1.0-alpha-1" );
versioning.addVersion( "1.0-alpha-2" );
- }
-
- protected void tearDown()
- throws Exception
- {
- super.tearDown();
- reporter = null;
- metadata = null;
+ metadata = new ArtifactRepositoryMetadata( artifact, versioning );
}
}
* limitations under the License.
*/
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.model.Dependency;
private static final String VALID_VERSION = "1.0-alpha-1";
- private ArtifactReporter reporter;
+ private ReportingDatabase reporter;
private Model model;
throws Exception
{
super.setUp();
- reporter = (ArtifactReporter) lookup( ArtifactReporter.ROLE );
+ reporter = new ReportingDatabase();
model = new Model();
processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
}
public void testArtifactFoundButNoDirectDependencies()
- throws ReportProcessorException
{
Artifact artifact = createValidArtifact();
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 0, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
}
private Artifact createValidArtifact()
{
- return artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
+ Artifact projectArtifact =
+ artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
+ projectArtifact.setRepository( repository );
+ return projectArtifact;
}
public void testArtifactNotFound()
- throws ReportProcessorException
{
Artifact artifact = artifactFactory.createProjectArtifact( INVALID, INVALID, INVALID );
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 0, reporter.getNumSuccesses() );
+ artifact.setRepository( repository );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 1, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.ARTIFACT_NOT_FOUND, result.getReason() );
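+ // ReportingDatabase groups results per artifact: iterate the ArtifactResults entries,
+ // then drill into their failure Result objects to check the reason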
+ Iterator failures = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) failures.next();
+ assertFalse( failures.hasNext() );
+ failures = results.getFailures().iterator();
+ Result result = (Result) failures.next();
+ assertEquals( "Artifact does not exist in the repository", result.getReason() );
}
public void testValidArtifactWithNullDependency()
- throws ReportProcessorException
{
Artifact artifact = createValidArtifact();
Dependency dependency = createValidDependency();
model.addDependency( dependency );
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 2, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 0, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
}
}
public void testValidArtifactWithValidSingleDependency()
- throws ReportProcessorException
{
Artifact artifact = createValidArtifact();
Dependency dependency = createValidDependency();
model.addDependency( dependency );
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 2, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 0, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
}
public void testValidArtifactWithValidMultipleDependencies()
- throws ReportProcessorException
{
Dependency dependency = createValidDependency();
model.addDependency( dependency );
model.addDependency( dependency );
Artifact artifact = createValidArtifact();
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 6, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 0, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
}
public void testValidArtifactWithAnInvalidDependency()
- throws ReportProcessorException
{
Dependency dependency = createValidDependency();
model.addDependency( dependency );
model.addDependency( createDependency( INVALID, INVALID, INVALID ) );
Artifact artifact = createValidArtifact();
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 5, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 1, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.DEPENDENCY_NOT_FOUND, result.getReason() );
+ Iterator failures = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) failures.next();
+ assertFalse( failures.hasNext() );
+ failures = results.getFailures().iterator();
+ Result result = (Result) failures.next();
+ assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ),
+ result.getReason() );
}
public void testValidArtifactWithInvalidDependencyGroupId()
- throws ReportProcessorException
{
Artifact artifact = createValidArtifact();
Dependency dependency = createDependency( INVALID, VALID_ARTIFACT_ID, VALID_VERSION );
model.addDependency( dependency );
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 1, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.DEPENDENCY_NOT_FOUND, result.getReason() );
+ Iterator failures = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) failures.next();
+ assertFalse( failures.hasNext() );
+ failures = results.getFailures().iterator();
+ Result result = (Result) failures.next();
+ assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
}
private Dependency createDependency( String o, String valid, String s )
}
public void testValidArtifactWithInvalidDependencyArtifactId()
- throws ReportProcessorException
{
Artifact artifact = createValidArtifact();
Dependency dependency = createDependency( VALID_GROUP_ID, INVALID, VALID_VERSION );
model.addDependency( dependency );
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 1, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.DEPENDENCY_NOT_FOUND, result.getReason() );
+ Iterator failures = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) failures.next();
+ assertFalse( failures.hasNext() );
+ failures = results.getFailures().iterator();
+ Result result = (Result) failures.next();
+ assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
}
public void testValidArtifactWithIncorrectDependencyVersion()
- throws ReportProcessorException
{
Artifact artifact = createValidArtifact();
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, INVALID );
model.addDependency( dependency );
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 1, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.DEPENDENCY_NOT_FOUND, result.getReason() );
+ Iterator failures = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) failures.next();
+ assertFalse( failures.hasNext() );
+ failures = results.getFailures().iterator();
+ Result result = (Result) failures.next();
+ assertEquals( getDependencyNotFoundMessage( dependency ), result.getReason() );
}
public void testValidArtifactWithInvalidDependencyVersion()
- throws ReportProcessorException
{
Artifact artifact = createValidArtifact();
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[" );
model.addDependency( dependency );
- processor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getNumSuccesses() );
+ processor.processArtifact( artifact, model, reporter );
assertEquals( 1, reporter.getNumFailures() );
assertEquals( 0, reporter.getNumWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.DEPENDENCY_INVALID_VERSION, result.getReason() );
+ Iterator failures = reporter.getArtifactIterator();
+ ArtifactResults results = (ArtifactResults) failures.next();
+ assertFalse( failures.hasNext() );
+ failures = results.getFailures().iterator();
+ Result result = (Result) failures.next();
+ assertEquals( getDependencyVersionInvalidMessage( dependency, "[" ), result.getReason() );
+ }
+
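+ // helpers building the expected failure messages asserted above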
+ private String getDependencyVersionInvalidMessage( Dependency dependency, String version )
+ {
+ return "Artifact's dependency " + dependency + " contains an invalid version " + version;
+ }
+
+ private String getDependencyNotFoundMessage( Dependency dependency )
+ {
+ return "Artifact's dependency " + dependency.toString() + " does not exist in the repository";
}
}
File indexDirectory;
- private DefaultArtifactReporter reporter = new DefaultArtifactReporter();
+ private ReportingDatabase reportDatabase = new ReportingDatabase();
protected void setUp()
throws Exception
{
artifact.setFile( null );
- processor.processArtifact( model, artifact, reporter, repository );
+ processor.processArtifact( artifact, model, reportDatabase );
- assertEquals( "Check no successes", 0, reporter.getNumSuccesses() );
- assertEquals( "Check warnings", 1, reporter.getNumWarnings() );
- assertEquals( "Check no failures", 0, reporter.getNumFailures() );
+ assertEquals( "Check warnings", 1, reportDatabase.getNumWarnings() );
+ assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
}
public void testSuccessOnAlreadyIndexedArtifact()
throws Exception
{
- processor.processArtifact( model, artifact, reporter, repository );
+ processor.processArtifact( artifact, model, reportDatabase );
- assertEquals( "Check no successes", 1, reporter.getNumSuccesses() );
- assertEquals( "Check warnings", 0, reporter.getNumWarnings() );
- assertEquals( "Check no failures", 0, reporter.getNumFailures() );
+ assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
+ assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
}
public void testSuccessOnDifferentGroupId()
throws Exception
{
artifact.setGroupId( "different.groupId" );
- processor.processArtifact( model, artifact, reporter, repository );
+ processor.processArtifact( artifact, model, reportDatabase );
- assertEquals( "Check no successes", 1, reporter.getNumSuccesses() );
- assertEquals( "Check warnings", 0, reporter.getNumWarnings() );
- assertEquals( "Check no failures", 0, reporter.getNumFailures() );
+ assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
+ assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
}
public void testSuccessOnNewArtifact()
{
Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
- processor.processArtifact( model, newArtifact, reporter, repository );
+ processor.processArtifact( newArtifact, model, reportDatabase );
- assertEquals( "Check no successes", 1, reporter.getNumSuccesses() );
- assertEquals( "Check warnings", 0, reporter.getNumWarnings() );
- assertEquals( "Check no failures", 0, reporter.getNumFailures() );
+ assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
+ assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
}
public void testFailure()
artifact.getVersion(), artifact.getType() );
duplicate.setFile( artifact.getFile() );
- processor.processArtifact( model, duplicate, reporter, repository );
+ processor.processArtifact( duplicate, model, reportDatabase );
- assertEquals( "Check no successes", 0, reporter.getNumSuccesses() );
- assertEquals( "Check warnings", 0, reporter.getNumWarnings() );
- assertEquals( "Check no failures", 1, reporter.getNumFailures() );
+ assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
+ assertEquals( "Check no failures", 1, reportDatabase.getNumFailures() );
}
private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
*/
import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.handler.ArtifactHandler;
-import org.apache.maven.artifact.handler.DefaultArtifactHandler;
-import org.apache.maven.artifact.versioning.VersionRange;
/**
* This class tests the InvalidPomArtifactReportProcessor class.
{
private ArtifactReportProcessor artifactReportProcessor;
- private ArtifactReporter reporter = new DefaultArtifactReporter();
+ private ReportingDatabase reporter = new ReportingDatabase();
public void setUp()
throws Exception
* Test the InvalidPomArtifactReportProcessor when the artifact is an invalid pom.
*/
public void testInvalidPomArtifactReportProcessorFailure()
- throws ReportProcessorException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "pom" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-3" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "artifactId", version, "compile", "pom", "", handler );
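+ // createArtifact(...) is a shared test helper (assumed to be provided by the test's base class),
+ // replacing direct DefaultArtifact construction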
+ Artifact artifact = createArtifact( "org.apache.maven", "artifactId", "1.0-alpha-3", "pom" );
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
assertEquals( 1, reporter.getNumFailures() );
}
* Test the InvalidPomArtifactReportProcessor when the artifact is a valid pom.
*/
public void testInvalidPomArtifactReportProcessorSuccess()
- throws ReportProcessorException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "pom" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-2" );
- Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "pom", "", handler );
+ Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 1, reporter.getNumSuccesses() );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
+ assertEquals( 0, reporter.getNumFailures() );
+ assertEquals( 0, reporter.getNumWarnings() );
}
* Test the InvalidPomArtifactReportProcessor when the artifact is not a pom.
*/
public void testNotAPomArtifactReportProcessorSuccess()
- throws ReportProcessorException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-1" );
- Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
+ Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "jar" );
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
+ assertEquals( 0, reporter.getNumFailures() );
assertEquals( 1, reporter.getNumWarnings() );
}
}
*/
import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.handler.ArtifactHandler;
-import org.apache.maven.artifact.handler.DefaultArtifactHandler;
-import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
{
private ArtifactReportProcessor artifactReportProcessor;
- private ArtifactReporter reporter = new DefaultArtifactReporter();
+ private ReportingDatabase reporter = new ReportingDatabase();
private MavenXpp3Reader pomReader;
* both in the file system pom and in the pom included in the package.
*/
public void testPackagedPomLocationArtifactReporterSuccess()
- throws ReportProcessorException, IOException, XmlPullParserException
+ throws IOException, XmlPullParserException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "2.0" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "maven-model", version, "compile", "jar", "", handler );
-
- String path = "org/apache/maven/maven-model/2.0/maven-model-2.0.pom";
- Model model = readPom( path );
+ Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getNumSuccesses() );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
+ assertEquals( 0, reporter.getNumFailures() );
+ assertEquals( 0, reporter.getNumWarnings() );
}
/**
* file system pom (but the jar file does not have a pom included in its package).
*/
public void testLocationArtifactReporterSuccess()
- throws ReportProcessorException, IOException, XmlPullParserException
+ throws IOException, XmlPullParserException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-1" );
- Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
-
- String path = "groupId/artifactId/1.0-alpha-1/artifactId-1.0-alpha-1.pom";
- Model model = readPom( path );
+ Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getNumSuccesses() );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
+ assertEquals( 0, reporter.getNumFailures() );
+ assertEquals( 0, reporter.getNumWarnings() );
}
/**
* in the file system pom.
*/
public void testLocationArtifactReporterFailure()
- throws IOException, XmlPullParserException, ReportProcessorException
+ throws IOException, XmlPullParserException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-2" );
- Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
-
- String path = "groupId/artifactId/1.0-alpha-2/artifactId-1.0-alpha-2.pom";
- Model model = readPom( path );
+ Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2" );
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
assertEquals( 1, reporter.getNumFailures() );
}
* location in the file system pom but instead matches the specified location in the packaged pom.
*/
public void testFsPomArtifactMatchFailure()
- throws IOException, ReportProcessorException, XmlPullParserException
+ throws IOException, XmlPullParserException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "2.0" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "maven-archiver", version, "compile", "jar", "", handler );
+ Artifact artifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0" );
- String path = "org/apache/maven/maven-archiver/2.0/maven-archiver-2.0.pom";
- Model model = readPom( path );
-
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
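+ // resolve the pom location through repository.pathOf() instead of a hard-coded path string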
+ Artifact pomArtifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0", "pom" );
+ Model model = readPom( repository.pathOf( pomArtifact ) );
+ artifactReportProcessor.processArtifact( artifact, model, reporter );
assertEquals( 1, reporter.getNumFailures() );
}
* location specified in the packaged pom but matches the location specified in the file system pom.
*/
public void testPkgPomArtifactMatchFailure()
- throws IOException, XmlPullParserException, ReportProcessorException
+ throws IOException, XmlPullParserException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "2.1" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "maven-monitor", version, "compile", "jar", "", handler );
-
- String path = "org/apache/maven/maven-monitor/2.1/maven-monitor-2.1.pom";
- Model model = readPom( path );
+ Artifact artifact = createArtifact( "org.apache.maven", "maven-monitor", "2.1" );
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
assertEquals( 1, reporter.getNumFailures() );
}
* location specified in the packaged pom and the location specified in the file system pom.
*/
public void testBothPomArtifactMatchFailure()
- throws IOException, XmlPullParserException, ReportProcessorException
+ throws IOException, XmlPullParserException
{
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "2.1" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "maven-project", version, "compile", "jar", "", handler );
-
- String path = "org/apache/maven/maven-project/2.1/maven-project-2.1.pom";
- Model model = readPom( path );
+ Artifact artifact = createArtifact( "org.apache.maven", "maven-project", "2.1" );
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ artifactReportProcessor.processArtifact( artifact, null, reporter );
assertEquals( 1, reporter.getNumFailures() );
}
+
}
<project>
- <parent>
- <artifactId>maven</artifactId>
- <groupId>org.apache.maven</groupId>
- <version>2.1</version>
- </parent>
+ <groupId>org.apache.maven</groupId>
<modelVersion>4.0.0</modelVersion>
<artifactId>maven-archiver</artifactId>
<name>Maven Archiver</name>
<ww:form method="post" action="saveConfiguration" namespace="/admin" validate="true">
<ww:textfield name="indexPath" label="Index Directory" size="100" required="true"/>
<ww:textfield name="indexerCronExpression" label="Indexing Schedule"/>
- <ww:textfield name="reporterCronExpression" label="Reporting Schedule"/>
<ww:hidden name="proxy.protocol" value="http"/>
<ww:textfield name="proxy.host" label="HTTP Proxy Host"/>
<ww:textfield name="proxy.port" label="HTTP Proxy Port"/>
<%-- TODO: a "delete index and run now" operation should be here too (really clean, remove deletions that didn't get picked up) --%>
<td><a href="<ww:url action="runIndexer" />">Run Now</a></td>
</tr>
- <tr>
- <th>Reporting Schedule</th>
- <td>
- <ww:property value="reporterCronExpression"/>
- </td>
- <td><a href="<ww:url action="runReporter" />">Run Now</a></td>
- </tr>
</table>
<ww:set name="proxy" value="proxy"/>