if ( !metadata.getGroupId().equals( artifact.getGroupId() ) )
{
- reporter.addFailure( artifact, getI18NString( groupIdKey ) );
+ addFailure( reporter, artifact, groupIdKey );
result = false;
}
if ( !repositoryMetadata.storedInGroupDirectory() )
{
if ( !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
{
- reporter.addFailure( artifact, getI18NString( artifactIdKey ) );
+ addFailure( reporter, artifact, artifactIdKey );
result = false;
}
if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
if ( !foundVersion )
{
- reporter.addFailure( artifact, getI18NString( versionsKey ) );
+ addFailure( reporter, artifact, versionsKey );
result = false;
}
}
// snapshot metadata
if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
{
- reporter.addFailure( artifact, getI18NString( versionKey ) );
+ addFailure( reporter, artifact, versionKey );
result = false;
}
if ( !correct )
{
- reporter.addFailure( artifact, getI18NString( snapshotKey ) );
+ addFailure( reporter, artifact, snapshotKey );
result = false;
}
}
return result;
}
+ private void addFailure( ReportingDatabase reporter, Artifact artifact, String key )
+ {
+ addFailureWithReason( reporter, artifact, getI18NString( key ) );
+
+ }
+
+ private static void addWarning( ReportingDatabase reporter, Artifact artifact, String message )
+ {
+ // TODO: should we be able to identify/fix these?
+ reporter.addWarning( artifact, null, null, message );
+ }
+
+ private static void addFailureWithReason( ReportingDatabase reporter, Artifact artifact, String reason )
+ {
+ // TODO: should we be able to identify/fix these?
+ reporter.addFailure( artifact, null, null, reason );
+ }
+
private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
FileTransaction transaction )
throws RepositoryConversionException
for ( Iterator i = warnings.iterator(); i.hasNext(); )
{
String message = (String) i.next();
- reporter.addWarning( artifact, message );
+ addWarning( reporter, artifact, message );
}
}
catch ( XmlPullParserException e )
{
- reporter.addFailure( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+ addFailureWithReason( reporter, artifact,
+ getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
result = false;
}
catch ( IOException e )
}
catch ( PomTranslationException e )
{
- reporter.addFailure( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+ addFailureWithReason( reporter, artifact,
+ getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
result = false;
}
finally
}
else
{
- reporter.addWarning( artifact, getI18NString( "warning.missing.pom" ) );
+ addWarning( reporter, artifact, getI18NString( "warning.missing.pom" ) );
}
return result;
}
}
catch ( DigesterException e )
{
- reporter.addFailure( artifact, getI18NString( key ) );
+ addFailure( reporter, artifact, key );
result = false;
}
}
matching = FileUtils.contentEquals( sourceFile, targetFile );
if ( !matching )
{
- reporter.addFailure( artifact, getI18NString( "failure.target.already.exists" ) );
+ addFailure( reporter, artifact, "failure.target.already.exists" );
result = false;
}
}
*/
private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
+ private static final String ROLE_HINT = "bad-metadata";
+
/**
* Process the metadata encountered in the repository and report all errors found, if any.
*
}
catch ( IOException e )
{
- reporter.addWarning( metadata, "Error getting plugin artifact directories versions: " + e );
+ addWarning( reporter, metadata, null, "Error getting plugin artifact directories versions: " + e );
}
}
else
}
if ( !found )
{
- reporter.addFailure( metadata, "Missing lastUpdated element inside the metadata." );
+ addFailure( reporter, metadata, "missing-last-updated",
+ "Missing lastUpdated element inside the metadata." );
}
if ( metadata.storedInArtifactVersionDirectory() )
}
catch ( IOException e )
{
- reporter.addWarning( metadata, "Error getting plugin artifact directories versions: " + e );
+ String reason = "Error getting plugin artifact directories versions: " + e;
+ addWarning( reporter, metadata, null, reason );
}
}
}
}
+ private static void addWarning( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
+ String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addWarning( metadata, ROLE_HINT, problem, reason );
+ }
+
/**
* Method for processing a GroupRepositoryMetadata
*
String artifactId = plugin.getArtifactId();
if ( artifactId == null || artifactId.length() == 0 )
{
- reporter.addFailure( metadata,
- "Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
+ addFailure( reporter, metadata, "missing-artifact-id:" + plugin.getPrefix(),
+ "Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
}
String prefix = plugin.getPrefix();
if ( prefix == null || prefix.length() == 0 )
{
- reporter.addFailure( metadata, "Missing or empty plugin prefix for artifactId " + artifactId + "." );
+ addFailure( reporter, metadata, "missing-plugin-prefix:" + artifactId,
+ "Missing or empty plugin prefix for artifactId " + artifactId + "." );
}
else
{
if ( prefixes.containsKey( prefix ) )
{
- reporter.addFailure( metadata, "Duplicate plugin prefix found: " + prefix + "." );
+ addFailure( reporter, metadata, "duplicate-plugin-prefix:" + prefix,
+ "Duplicate plugin prefix found: " + prefix + "." );
}
else
{
File pluginDir = new File( metadataDir, artifactId );
if ( !pluginDirs.contains( pluginDir ) )
{
- reporter.addFailure( metadata, "Metadata plugin " + artifactId + " not found in the repository" );
+ addFailure( reporter, metadata, "missing-plugin-from-repository:" + artifactId,
+ "Metadata plugin " + artifactId + " not found in the repository" );
}
else
{
for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
{
File plugin = (File) plugins.next();
- reporter.addFailure( metadata, "Plugin " + plugin.getName() + " is present in the repository but " +
- "missing in the metadata." );
+ addFailure( reporter, metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " +
+ plugin.getName() + " is present in the repository but " + "missing in the metadata." );
}
}
}
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- reporter.addFailure( metadata, "Snapshot artifact " + version + " does not exist." );
+ addFailure( reporter, metadata, "missing-snapshot-artifact-from-repository:" + version,
+ "Snapshot artifact " + version + " does not exist." );
}
}
}
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- reporter.addFailure( metadata, "Artifact version " + version + " is present in metadata but " +
- "missing in the repository." );
+ addFailure( reporter, metadata, "missing-artifact-from-repository:" + version, "Artifact version " +
+ version + " is present in metadata but " + "missing in the repository." );
}
}
}
* @param metadata the metadata to be processed.
* @param repository the repository where the metadata was encountered
* @param reporter the ReportingDatabase to receive processing results
+ * @throws java.io.IOException if there is a problem reading from the file system
*/
private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
ReportingDatabase reporter )
String version = path.getParentFile().getName();
if ( !metadataVersions.contains( version ) )
{
- reporter.addFailure( metadata, "Artifact version " + version + " found in the repository but " +
- "missing in the metadata." );
+ addFailure( reporter, metadata, "missing-artifact-from-metadata:" + version, "Artifact version " +
+ version + " found in the repository but " + "missing in the metadata." );
}
}
}
else
{
- reporter.addFailure( metadata, "Metadata's directory did not exist: " + versionsDir );
+ addFailure( reporter, metadata, null, "Metadata's directory did not exist: " + versionsDir );
}
}
return artifactIdFiles;
}
+
+ private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
+ String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+ }
}
*/
private Digester md5Digester;
+ private static final String ROLE_HINT = "checksum";
+
public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
{
ArtifactRepository repository = artifact.getRepository();
}
catch ( DigesterException e )
{
- reporter.addFailure( artifact, e.getMessage() );
+ addFailure( reporter, artifact, "checksum-wrong", e.getMessage() );
}
catch ( IOException e )
{
- reporter.addFailure( artifact, "Read file error: " + e.getMessage() );
+ addFailure( reporter, artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
}
}
else
{
- reporter.addFailure( artifact, digester.getAlgorithm() + " checksum file does not exist." );
+ addFailure( reporter, artifact, "checksum-missing",
+ digester.getAlgorithm() + " checksum file does not exist." );
}
}
+
+ private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ }
}
*/
private Digester md5Digester;
+ private static final String ROLE_HINT = "checksum-metadata";
+
/**
* Validate the checksums of the metadata. Get the metadata file from the
* repository then validate the checksum.
}
catch ( DigesterException e )
{
- reporter.addFailure( metadata, e.getMessage() );
+ addFailure( reporter, metadata, "checksum-wrong", e.getMessage() );
}
catch ( IOException e )
{
- reporter.addFailure( metadata, "Read file error: " + e.getMessage() );
+ addFailure( reporter, metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
}
}
else
{
- reporter.addFailure( metadata, digester.getAlgorithm() + " checksum file does not exist." );
+ addFailure( reporter, metadata, "checksum-missing",
+ digester.getAlgorithm() + " checksum file does not exist." );
}
}
+ private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
+ String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+ }
+
}
}
catch ( ProjectBuildingException e )
{
- reporter.addWarning( artifact, "Error reading project model: " + e );
+ reporter.addWarning( artifact, null, null, "Error reading project model: " + e );
}
reporter.removeArtifact( artifact );
* limitations under the License.
*/
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.LinkedHashSet;
-import java.util.Set;
+import java.util.LinkedHashMap;
+import java.util.Map;
/**
* The default report set, for repository health.
{
/**
* Role hints of the reports to include in this set.
- *
- * @todo re-enable duplicate, once a way to populate the index is determined!
*/
- private static final Set reports = new LinkedHashSet( Arrays.asList( new String[]{"checksum", "dependency"
-/*, "duplicate"*/, "invalid-pom", "artifact-location", "bad-metadata", "checksum-metadata"} ) );
+ private static final Map reports = new LinkedHashMap();
+
+ static
+ {
+ reports.put( "checksum", "Checksum Problems" );
+ reports.put( "dependency", "Dependency Problems" );
+ // TODO re-enable duplicate, once a way to populate the index is determined!
+// reports.put( "duplicate", "Duplicate Artifact Problems" );
+ reports.put( "invalid-pom", "POM Problems" );
+ reports.put( "bad-metadata", "Metadata Problems" );
+ reports.put( "checksum-metadata", "Metadata Checksum Problems" );
+ reports.put( "artifact-location", "Artifact Location Problems" );
+ }
public boolean includeReport( String key )
{
- return reports.contains( key );
+ return reports.containsKey( key );
}
- public Collection getReportIds()
+ public Map getReports()
{
return reports;
}
private static final String POM = "pom";
+ private static final String ROLE_HINT = "dependency";
+
public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
{
RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( artifact.getRepository() );
- processArtifact( artifact, reporter, queryLayer );
+ if ( !queryLayer.containsArtifact( artifact ) )
+ {
+ // TODO: is this even possible?
+ addFailure( reporter, artifact, "missing-artifact", "Artifact does not exist in the repository" );
+ }
if ( model != null && POM.equals( artifact.getType() ) )
{
}
}
- private void processArtifact( Artifact artifact, ReportingDatabase reporter,
- RepositoryQueryLayer repositoryQueryLayer )
+ private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
{
- if ( !repositoryQueryLayer.containsArtifact( artifact ) )
- {
- reporter.addFailure( artifact, "Artifact does not exist in the repository" );
- }
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addFailure( artifact, ROLE_HINT, problem, reason );
}
private void processDependencies( List dependencies, ReportingDatabase reporter,
String reason = MessageFormat.format(
"Artifact''s dependency {0} does not exist in the repository",
new String[]{getDependencyString( dependency )} );
- reporter.addFailure( sourceArtifact, reason );
+ addFailure( reporter, sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ),
+ reason );
}
}
catch ( InvalidVersionSpecificationException e )
String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}",
new String[]{getDependencyString( dependency ),
dependency.getVersion()} );
- reporter.addFailure( sourceArtifact, reason );
+ addFailure( reporter, sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
}
}
}
}
+ private String getDependencyKey( Dependency dependency )
+ {
+ String str = dependency.getGroupId();
+ str += ":" + dependency.getArtifactId();
+ str += ":" + dependency.getVersion();
+ str += ":" + dependency.getType();
+ if ( dependency.getClassifier() != null )
+ {
+ str += ":" + dependency.getClassifier();
+ }
+ return str;
+ }
+
static String getDependencyString( Dependency dependency )
{
String str = "(group=" + dependency.getGroupId();
*/
private String indexDirectory;
+ private static final String ROLE_HINT = "duplicate";
+
public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
{
ArtifactRepository repository = artifact.getRepository();
}
catch ( DigesterException e )
{
- reporter.addWarning( artifact, "Unable to generate checksum for " + artifact.getFile() + ": " + e );
+ addWarning( reporter, artifact, null,
+ "Unable to generate checksum for " + artifact.getFile() + ": " + e );
}
if ( checksum != null )
String groupId = artifact.getGroupId();
if ( groupId.equals( result.getGroupId() ) )
{
- reporter.addFailure( artifact, "Found duplicate for " + artifact.getId() );
+ addFailures( reporter, artifact, "duplicate",
+ "Found duplicate for " + artifact.getId() );
}
}
}
}
catch ( RepositoryIndexSearchException e )
{
- reporter.addWarning( artifact, "Failed to search in index" + e );
+ addWarning( reporter, artifact, null, "Failed to search in index" + e );
}
}
}
else
{
- reporter.addWarning( artifact, "Artifact file is null" );
+ addWarning( reporter, artifact, null, "Artifact file is null" );
}
}
+
+ private static void addFailures( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ }
+
+ private static void addWarning( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addWarning( artifact, ROLE_HINT, problem, reason );
+ }
}
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.File;
-import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
public class InvalidPomArtifactReportProcessor
implements ArtifactReportProcessor
{
+ private static final String ROLE_HINT = "invalid-pom";
+
/**
* @param artifact The pom xml file to be validated, passed as an artifact object.
* @param reporter The artifact reporter object.
if ( !f.exists() )
{
- reporter.addFailure( artifact, "Artifact not found." );
+ addFailure( reporter, artifact, "pom-missing", "POM not found." );
}
else
{
}
catch ( XmlPullParserException e )
{
- reporter.addFailure( artifact, "The pom xml file is not well-formed. Error while parsing: " +
- e.getMessage() );
- }
- catch ( FileNotFoundException e )
- {
- reporter.addFailure( artifact, "Error while reading the pom xml file: " + e.getMessage() );
+ addFailure( reporter, artifact, "pom-parse-exception",
+ "The pom xml file is not well-formed. Error while parsing: " + e.getMessage() );
}
catch ( IOException e )
{
- reporter.addFailure( artifact, "Error while reading the pom xml file: " + e.getMessage() );
+ addFailure( reporter, artifact, "pom-io-exception",
+ "Error while reading the pom xml file: " + e.getMessage() );
}
finally
{
}
}
}
+
+ private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ }
}
private static final String POM = "pom";
+ private static final String ROLE_HINT = "artifact-location";
+
/**
* Check whether the artifact is in its proper location. The location of the artifact
* is validated first against the groupId, artifactId and versionId in the specified model
String modelPath = repository.pathOf( modelArtifact );
if ( !modelPath.equals( artifactPath ) )
{
- reporter.addFailure( artifact,
- "The artifact is out of place. It does not match the specified location in the repository pom: " +
- modelPath );
+ addFailure( reporter, artifact, "repository-pom-location",
+ "The artifact is out of place. It does not match the specified location in the repository pom: " +
+ modelPath );
}
}
}
extractedModel.getPackaging() );
if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
{
- reporter.addFailure( artifact,
- "The artifact is out of place. It does not match the specified location in the packaged pom." );
+ addFailure( reporter, artifact, "packaged-pom-location",
+ "The artifact is out of place. It does not match the specified location in the packaged pom." );
}
}
}
}
}
+ private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ }
+
private static void adjustDistributionArtifactHandler( Artifact artifact )
{
// need to tweak these as they aren't currently in the known type converters. TODO - add them in Maven
}
catch ( IOException e )
{
- reporter.addWarning( artifact, "Unable to read artifact to extract model: " + e );
+ addWarning( reporter, artifact, "Unable to read artifact to extract model: " + e );
}
catch ( XmlPullParserException e )
{
- reporter.addWarning( artifact, "Unable to parse extracted model: " + e );
+ addWarning( reporter, artifact, "Unable to parse extracted model: " + e );
}
finally
{
return model;
}
+ private static void addWarning( ReportingDatabase reporter, Artifact artifact, String reason )
+ {
+ // TODO: reason could be an i18n key derived from the processor and the problem ID
+ reporter.addWarning( artifact, ROLE_HINT, null, reason );
+ }
+
private Model readModel( InputStream entryStream )
throws IOException, XmlPullParserException
{
* limitations under the License.
*/
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.LinkedHashSet;
-import java.util.Set;
+import java.util.LinkedHashMap;
+import java.util.Map;
/**
* The report set for finding old artifacts (both snapshot and release)
*
* @todo implement these report processors!
*/
- private static final Set reports =
- new LinkedHashSet( Arrays.asList( new String[]{"old-artifact", "old-snapshot-artifact"} ) );
+ private static final Map reports = new LinkedHashMap();
+
+ static
+ {
+ reports.put( "old-artifact", "Old Artifacts" );
+ reports.put( "old-snapshot-artifact", "Old Snapshot Artifacts" );
+ }
public boolean includeReport( String key )
{
- return reports.contains( key );
+ return reports.containsKey( key );
}
- public Collection getReportIds()
+ public Map getReports()
{
return reports;
}
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
import org.apache.maven.model.Model;
-import java.util.Collection;
+import java.util.Map;
/*
* Copyright 2005-2006 The Apache Software Foundation.
boolean includeReport( String key );
/**
- * Get a list of the report processors in this set.
+ * Get the report processors in this set. The map is keyed by the report's role hint, and the value is its
+ * display name.
*
- * @return the report IDs
+ * @return the reports
*/
- Collection getReportIds();
+ Map getReports();
/**
* Get the user-friendly name of this report.
private Set metadataWithProblems;
+ private Map filteredDatabases = new HashMap();
+
public ReportingDatabase( ReportGroup reportGroup )
{
this( reportGroup, new Reporting() );
initMetadataMap();
}
- public void addFailure( Artifact artifact, String reason )
+ public void addFailure( Artifact artifact, String processor, String problem, String reason )
{
ArtifactResults results = getArtifactResults( artifact );
- results.addFailure( createResults( reason ) );
+ results.addFailure( createResult( processor, problem, reason ) );
numFailures++;
updateTimings();
+
+ if ( filteredDatabases.containsKey( problem ) )
+ {
+ ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+ reportingDatabase.addFailure( artifact, processor, problem, reason );
+ }
}
- public void addWarning( Artifact artifact, String reason )
+ public void addWarning( Artifact artifact, String processor, String problem, String reason )
{
ArtifactResults results = getArtifactResults( artifact );
- results.addWarning( createResults( reason ) );
+ results.addWarning( createResult( processor, problem, reason ) );
numWarnings++;
updateTimings();
+
+ if ( filteredDatabases.containsKey( problem ) )
+ {
+ ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+ reportingDatabase.addWarning( artifact, processor, problem, reason );
+ }
}
private ArtifactResults getArtifactResults( Artifact artifact )
+ {
+ return getArtifactResults( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
+ artifact.getType(), artifact.getClassifier() );
+ }
+
+ private ArtifactResults getArtifactResults( String groupId, String artifactId, String version, String type,
+ String classifier )
{
Map artifactMap = this.artifactMap;
- String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
- artifact.getType(), artifact.getClassifier() );
+ String key = getArtifactKey( groupId, artifactId, version, type, classifier );
ArtifactResults results = (ArtifactResults) artifactMap.get( key );
if ( results == null )
{
results = new ArtifactResults();
- results.setArtifactId( artifact.getArtifactId() );
- results.setClassifier( artifact.getClassifier() );
- results.setGroupId( artifact.getGroupId() );
- results.setType( artifact.getType() );
- results.setVersion( artifact.getVersion() );
+ results.setArtifactId( artifactId );
+ results.setClassifier( classifier );
+ results.setGroupId( groupId );
+ results.setType( type );
+ results.setVersion( version );
artifactMap.put( key, results );
reporting.getArtifacts().add( results );
return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
}
- private static Result createResults( String reason )
+ private static Result createResult( String processor, String problem, String reason )
{
Result result = new Result();
+ result.setProcessor( processor );
+ result.setProblem( problem );
result.setReason( reason );
return result;
}
- public void addFailure( RepositoryMetadata metadata, String reason )
+ public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
{
MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
if ( !metadataWithProblems.contains( results ) )
{
metadataWithProblems.add( results );
}
- results.addFailure( createResults( reason ) );
+ results.addFailure( createResult( processor, problem, reason ) );
numFailures++;
updateTimings();
+
+ if ( filteredDatabases.containsKey( problem ) )
+ {
+ ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+ reportingDatabase.addFailure( metadata, processor, problem, reason );
+ }
}
- public void addWarning( RepositoryMetadata metadata, String reason )
+ public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
{
MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
if ( !metadataWithProblems.contains( results ) )
{
metadataWithProblems.add( results );
}
- results.addWarning( createResults( reason ) );
+ results.addWarning( createResult( processor, problem, reason ) );
numWarnings++;
updateTimings();
+
+ if ( filteredDatabases.containsKey( problem ) )
+ {
+ ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
+
+ reportingDatabase.addWarning( metadata, processor, problem, reason );
+ }
}
public Set getMetadataWithProblems()
public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp )
{
- String key = getMetadataKey( metadata );
+ String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
Map map = metadataMap;
MetadataResults results = (MetadataResults) map.get( key );
return results != null && results.getLastModified() >= timestamp;
private MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified )
{
- String key = getMetadataKey( metadata );
+ return getMetadataResults( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion(),
+ lastModified );
+ }
+
+ private MetadataResults getMetadataResults( String groupId, String artifactId, String baseVersion,
+ long lastModified )
+ {
+ String key = getMetadataKey( groupId, artifactId, baseVersion );
Map metadataMap = this.metadataMap;
MetadataResults results = (MetadataResults) metadataMap.get( key );
if ( results == null )
{
results = new MetadataResults();
- results.setArtifactId( metadata.getArtifactId() );
- results.setGroupId( metadata.getGroupId() );
- results.setVersion( metadata.getBaseVersion() );
+ results.setArtifactId( artifactId );
+ results.setGroupId( groupId );
+ results.setVersion( baseVersion );
results.setLastModified( lastModified );
metadataMap.put( key, results );
return results;
}
- private static String getMetadataKey( RepositoryMetadata metadata )
- {
- return getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
- }
-
public void removeArtifact( Artifact artifact )
{
Map map = artifactMap;
artifactMap.clear();
metadataMap.clear();
metadataWithProblems.clear();
+ filteredDatabases.clear();
reporting.getArtifacts().clear();
reporting.getMetadata().clear();
{
return reportGroup;
}
+
+ public ReportingDatabase getFilteredDatabase( String filter )
+ {
+ ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( filter );
+
+ if ( reportingDatabase == null )
+ {
+ reportingDatabase = new ReportingDatabase( reportGroup, repository );
+
+ Reporting reporting = reportingDatabase.getReporting();
+ reporting.setExecutionTime( this.reporting.getExecutionTime() );
+ reporting.setLastModified( this.reporting.getLastModified() );
+
+ for ( Iterator i = this.reporting.getArtifacts().iterator(); i.hasNext(); )
+ {
+ ArtifactResults results = (ArtifactResults) i.next();
+ ArtifactResults targetResults = null;
+ for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
+ {
+ Result result = (Result) j.next();
+
+ if ( filter.equals( result.getProcessor() ) )
+ {
+ if ( targetResults == null )
+ {
+ // lazily create so it is not added unless it has to be
+ targetResults = createArtifactResults( reportingDatabase, results );
+ }
+
+ targetResults.addFailure( result );
+ reportingDatabase.numFailures++;
+ }
+ }
+ for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
+ {
+ Result result = (Result) j.next();
+
+ if ( filter.equals( result.getProcessor() ) )
+ {
+ if ( targetResults == null )
+ {
+ // lazily create so it is not added unless it has to be
+ targetResults = createArtifactResults( reportingDatabase, results );
+ }
+
+ targetResults.addWarning( result );
+ reportingDatabase.numWarnings++;
+ }
+ }
+ }
+ for ( Iterator i = this.reporting.getMetadata().iterator(); i.hasNext(); )
+ {
+ MetadataResults results = (MetadataResults) i.next();
+ MetadataResults targetResults = null;
+ for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
+ {
+ Result result = (Result) j.next();
+
+ if ( filter.equals( result.getProcessor() ) )
+ {
+ if ( targetResults == null )
+ {
+ // lazily create so it is not added unless it has to be
+ targetResults = createMetadataResults( reportingDatabase, results );
+ }
+
+ targetResults.addFailure( result );
+ reportingDatabase.numFailures++;
+ }
+ }
+ for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
+ {
+ Result result = (Result) j.next();
+
+ if ( filter.equals( result.getProcessor() ) )
+ {
+ if ( targetResults == null )
+ {
+ // lazily create so it is not added unless it has to be
+ targetResults = createMetadataResults( reportingDatabase, results );
+ }
+
+ targetResults.addWarning( result );
+ reportingDatabase.numWarnings++;
+ }
+ }
+ }
+
+ filteredDatabases.put( filter, reportingDatabase );
+ }
+
+ return reportingDatabase;
+ }
+
+ private static MetadataResults createMetadataResults( ReportingDatabase reportingDatabase, MetadataResults results )
+ {
+ MetadataResults targetResults = reportingDatabase.getMetadataResults( results.getGroupId(),
+ results.getArtifactId(),
+ results.getVersion(),
+ results.getLastModified() );
+ reportingDatabase.metadataWithProblems.add( targetResults );
+ return targetResults;
+ }
+
+ private static ArtifactResults createArtifactResults( ReportingDatabase reportingDatabase, ArtifactResults results )
+ {
+ return reportingDatabase.getArtifactResults( results.getGroupId(), results.getArtifactId(),
+ results.getVersion(), results.getType(), results.getClassifier() );
+ }
}
<description>
The reason given for the result.
</description>
+ <required>true</required>
+ </field>
+ <field xml.attribute="true">
+ <name>processor</name>
+ <version>1.0.0</version>
+ <type>String</type>
+ <description>
+ The processor identifier for the report that triggered the problem. This matches the role-hint of a report
+ processor.
+ </description>
+ <required>true</required>
+ </field>
+ <field xml.attribute="true">
+ <name>problem</name>
+ <version>1.0.0</version>
+ <type>String</type>
+ <description>
+ The problem identifier for the problem that occurred. This is so that the processor can identify how to
+ fix the problem. It may be null if it cannot be fixed automatically.
+ </description>
</field>
</fields>
</class>
+++ /dev/null
-package org.apache.maven.archiva.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-
-import java.util.Iterator;
-
-/**
- *
- */
-public class ArtifactReporterTest
- extends AbstractRepositoryReportsTestCase
-{
- private ReportingDatabase reportingDatabase;
-
- private Artifact artifact;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
- ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
- artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.setLastUpdated( "20050611.202020" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
- }
-
- public void testArtifactReporterSingleFailure()
- {
- reportingDatabase.addFailure( artifact, "failed once" );
-
- Iterator artifactIterator = reportingDatabase.getArtifactIterator();
- ArtifactResults results = (ArtifactResults) artifactIterator.next();
- assertFalse( artifactIterator.hasNext() );
-
- int count = 0;
- for ( Iterator i = results.getFailures().iterator(); i.hasNext(); count++ )
- {
- i.next();
- }
- assertEquals( 1, count );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- }
-
- public void testArtifactReporterMultipleFailure()
- {
- reportingDatabase.addFailure( artifact, "failed once" );
- reportingDatabase.addFailure( artifact, "failed twice" );
- reportingDatabase.addFailure( artifact, "failed thrice" );
-
- Iterator artifactIterator = reportingDatabase.getArtifactIterator();
- ArtifactResults results = (ArtifactResults) artifactIterator.next();
- assertFalse( artifactIterator.hasNext() );
-
- int count = 0;
- for ( Iterator i = results.getFailures().iterator(); i.hasNext(); count++ )
- {
- i.next();
- }
- assertEquals( 3, count );
- assertEquals( 3, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- }
-
- public void testFailureMessages()
- {
- reportingDatabase.addFailure( artifact, "failed once" );
- reportingDatabase.addFailure( artifact, "failed twice" );
- reportingDatabase.addFailure( artifact, "failed thrice" );
- Iterator artifactIterator = reportingDatabase.getArtifactIterator();
- ArtifactResults results = (ArtifactResults) artifactIterator.next();
- assertFalse( artifactIterator.hasNext() );
- Iterator failure = results.getFailures().iterator();
- assertEquals( "failed once", ( (Result) failure.next() ).getReason() );
- assertEquals( "failed twice", ( (Result) failure.next() ).getReason() );
- assertEquals( "failed thrice", ( (Result) failure.next() ).getReason() );
- }
-
- public void testArtifactReporterSingleWarning()
- {
- reportingDatabase.addWarning( artifact, "you've been warned" );
- Iterator artifactIterator = reportingDatabase.getArtifactIterator();
- ArtifactResults results = (ArtifactResults) artifactIterator.next();
- assertFalse( artifactIterator.hasNext() );
-
- int count = 0;
- for ( Iterator i = results.getWarnings().iterator(); i.hasNext(); count++ )
- {
- i.next();
- }
- assertEquals( 1, count );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- }
-
- public void testArtifactReporterMultipleWarning()
- {
- reportingDatabase.addWarning( artifact, "i'm warning you" );
- reportingDatabase.addWarning( artifact, "you have to stop now" );
- reportingDatabase.addWarning( artifact, "all right... that does it!" );
-
- Iterator artifactIterator = reportingDatabase.getArtifactIterator();
- ArtifactResults results = (ArtifactResults) artifactIterator.next();
- assertFalse( artifactIterator.hasNext() );
-
- int count = 0;
- for ( Iterator i = results.getWarnings().iterator(); i.hasNext(); count++ )
- {
- i.next();
- }
- assertEquals( 3, count );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 3, reportingDatabase.getNumWarnings() );
- }
-
- public void testWarningMessages()
- {
- reportingDatabase.addWarning( artifact, "i'm warning you" );
- reportingDatabase.addWarning( artifact, "you have to stop now" );
- reportingDatabase.addWarning( artifact, "all right... that does it!" );
-
- Iterator artifactIterator = reportingDatabase.getArtifactIterator();
- ArtifactResults results = (ArtifactResults) artifactIterator.next();
- assertFalse( artifactIterator.hasNext() );
- Iterator warning = results.getWarnings().iterator();
- assertEquals( "i'm warning you", ( (Result) warning.next() ).getReason() );
- assertEquals( "you have to stop now", ( (Result) warning.next() ).getReason() );
- assertEquals( "all right... that does it!", ( (Result) warning.next() ).getReason() );
- }
-}
private RepositoryMetadata metadata;
+ private static final String PROCESSOR = "processor";
+
+ private static final String PROBLEM = "problem";
+
public void testEmptyArtifactReporter()
{
assertEquals( "No failures", 0, reportingDatabase.getNumFailures() );
public void testMetadataSingleFailure()
{
- reportingDatabase.addFailure( metadata, "Single Failure Reason" );
+ reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
Result result = (Result) failures.next();
assertMetadata( results );
assertEquals( "check failure reason", "Single Failure Reason", result.getReason() );
+ assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+ assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
assertFalse( "no more failures", failures.hasNext() );
}
public void testMetadataMultipleFailures()
{
- reportingDatabase.addFailure( metadata, "First Failure Reason" );
- reportingDatabase.addFailure( metadata, "Second Failure Reason" );
+ reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
+ reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
Result result = (Result) failures.next();
assertMetadata( results );
assertEquals( "check failure reason", "First Failure Reason", result.getReason() );
+ assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+ assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
assertTrue( "must have 2nd failure", failures.hasNext() );
result = (Result) failures.next();
assertEquals( "check failure reason", "Second Failure Reason", result.getReason() );
+ assertEquals( "check failure parameters", PROCESSOR, result.getProcessor() );
+ assertEquals( "check failure parameters", PROBLEM, result.getProblem() );
assertFalse( "no more failures", failures.hasNext() );
}
public void testMetadataSingleWarning()
{
- reportingDatabase.addWarning( metadata, "Single Warning Message" );
+ reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
Result result = (Result) warnings.next();
assertMetadata( results );
assertEquals( "check failure reason", "Single Warning Message", result.getReason() );
+        assertEquals( "check warning parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check warning parameters", PROBLEM, result.getProblem() );
assertFalse( "no more warnings", warnings.hasNext() );
}
public void testMetadataMultipleWarnings()
{
- reportingDatabase.addWarning( metadata, "First Warning" );
- reportingDatabase.addWarning( metadata, "Second Warning" );
+ reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
+ reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
Result result = (Result) warnings.next();
assertMetadata( results );
assertEquals( "check failure reason", "First Warning", result.getReason() );
+        assertEquals( "check warning parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check warning parameters", PROBLEM, result.getProblem() );
assertTrue( "must have 2nd warning", warnings.hasNext() );
result = (Result) warnings.next();
assertEquals( "check failure reason", "Second Warning", result.getReason() );
+        assertEquals( "check warning parameters", PROCESSOR, result.getProcessor() );
+        assertEquals( "check warning parameters", PROBLEM, result.getProblem() );
assertFalse( "no more warnings", warnings.hasNext() );
}
private static final String DEFAULT_REPORT_GROUP = "health";
+ private String filter;
+
public String execute()
throws Exception
{
ReportingDatabase database = executor.getReportDatabase( repository, reportGroup );
+ if ( filter != null && !filter.equals( "-" ) )
+ {
+ database = database.getFilteredDatabase( filter );
+ }
+
databases.add( database );
}
{
return reports;
}
+
+ public String getFilter()
+ {
+ return filter;
+ }
+
+ public void setFilter( String filter )
+ {
+ this.filter = filter;
+ }
}
<div id="contentArea">
-<%-- TODO!: select filter --%>
<ww:form action="reports" namespace="/admin">
- <ww:select list="reports" label="Report" name="reportGroup"/>
+ <ww:select list="reports" label="Report" name="reportGroup" onchange="document.reports.submit();"/>
<ww:select list="configuration.repositories" listKey="id" listValue="name" label="Repository" headerKey="-"
- headerValue="(All repositories)" name="repositoryId"/>
+ headerValue="(All repositories)" name="repositoryId" onchange="document.reports.submit();"/>
+ <ww:select list="reports[reportGroup].reports" label="Filter" headerKey="-" headerValue="(All Problems)"
+ name="filter" onchange="document.reports.submit();"/>
<ww:submit value="Get Report"/>
</ww:form>