import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
import org.apache.maven.archiva.discoverer.DiscovererException;
import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
+import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
import org.apache.maven.archiva.reporting.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.MetadataReportProcessor;
import org.apache.maven.archiva.reporting.ReportingDatabase;
+import org.apache.maven.archiva.reporting.ReportingMetadataFilter;
import org.apache.maven.archiva.reporting.ReportingStore;
import org.apache.maven.archiva.reporting.ReportingStoreException;
import org.apache.maven.archiva.scheduler.TaskExecutionException;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
import org.apache.maven.model.Model;
import org.apache.maven.project.MavenProject;
// run the reports
runArtifactReports( currentArtifacts, reporter );
+ // store intermittently because if anything crashes out after indexing then we will have
+ // lost track of these artifacts' reports
+ reportingStore.storeReports( reporter, repository );
+
index.indexArtifacts( currentArtifacts, recordFactory );
}
flushProjectBuilderCacheHack();
}
- // TODO! use reporting manager as a filter
+ MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter );
+
MetadataDiscoverer metadataDiscoverer =
(MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
- metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns );
+ List metadata =
+ metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter );
- //TODO! metadata reporting
+ if ( !metadata.isEmpty() )
+ {
+ getLogger().info( "Discovered " + metadata.size() + " unprocessed metadata files" );
+
+ // run the reports
+ runMetadataReports( metadata, repository, reporter );
+ }
reportingStore.storeReports( reporter, repository );
}
getLogger().info( "Finished repository indexing process in " + time + "ms" );
}
+ private void runMetadataReports( List metadata, ArtifactRepository repository, ReportingDatabase reporter )
+ {
+ for ( Iterator i = metadata.iterator(); i.hasNext(); )
+ {
+ RepositoryMetadata repositoryMetadata = (RepositoryMetadata) i.next();
+
+ File file =
+ new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
+ reporter.cleanMetadata( repositoryMetadata, file.lastModified() );
+
+ // TODO: should the report set be limitable by configuration?
+ runMetadataReports( repositoryMetadata, repository, reporter );
+ }
+ }
+
+ private void runMetadataReports( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
+ ReportingDatabase reporter )
+ {
+ for ( Iterator i = metadataReports.iterator(); i.hasNext(); )
+ {
+ MetadataReportProcessor report = (MetadataReportProcessor) i.next();
+
+ report.processMetadata( repositoryMetadata, repository, reporter );
+ }
+ }
+
private void runArtifactReports( List artifacts, ReportingDatabase reporter )
{
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
{
reporter.addWarning( artifact, "Error reading project model: " + e );
}
+
+ reporter.removeArtifact( artifact );
+
runArtifactReports( artifact, model, reporter );
}
}
* limitations under the License.
*/
+import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
+import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.File;
+import java.io.FileReader;
import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
import java.io.Reader;
-import java.net.MalformedURLException;
-import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
*/
private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"};
- public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
+ public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter filter )
throws DiscovererException
{
if ( !"file".equals( repository.getProtocol() ) )
try
{
RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), metadataPath );
- metadataFiles.add( metadata );
+ File f = new File( repository.getBasedir(), metadataPath );
+ if ( filter.include( metadata, f.lastModified() ) )
+ {
+ metadataFiles.add( metadata );
+ }
+ else
+ {
+ addExcludedPath( metadataPath, "Metadata excluded by filter" );
+ }
}
catch ( DiscovererException e )
{
return metadataFiles;
}
+ public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
+ throws DiscovererException
+ {
+ return discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
+ }
+
private RepositoryMetadata buildMetadata( String repo, String metadataPath )
throws DiscovererException
{
Metadata m;
- String repoPath = repo + "/" + metadataPath;
+ File f = new File( repo, metadataPath );
try
{
- URL url = new File( repoPath ).toURI().toURL();
- InputStream is = url.openStream();
- Reader reader = new InputStreamReader( is );
+ Reader reader = new FileReader( f );
MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
m = metadataReader.read( reader );
}
catch ( XmlPullParserException e )
{
- throw new DiscovererException( "Error parsing metadata file '" + repoPath + "': " + e.getMessage(), e );
- }
- catch ( MalformedURLException e )
- {
- // shouldn't happen
- throw new DiscovererException( "Error constructing metadata file '" + repoPath + "': " + e.getMessage(),
- e );
+ throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
}
catch ( IOException e )
{
- throw new DiscovererException( "Error reading metadata file '" + repoPath + "': " + e.getMessage(), e );
+ throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
}
RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
* limitations under the License.
*/
+import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
import org.apache.maven.artifact.repository.ArtifactRepository;
import java.util.List;
{
String ROLE = MetadataDiscoverer.class.getName();
+ /**
+ * Search for metadata files in the repository.
+ *
+ * @param repository The repository.
+ * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
+ * @param metadataFilter filter to use on the discovered metadata before returning
+ * @return the list of metadata found
+ * @throws DiscovererException if there is a problem during the discovery process
+ */
+ List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter metadataFilter )
+ throws DiscovererException;
+
/**
* Search for metadata files in the repository.
*
--- /dev/null
+package org.apache.maven.archiva.discoverer.filter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+/**
+ * Filter that accepts all metadata.
+ */
+public class AcceptAllMetadataFilter
+ implements MetadataFilter
+{
+ public boolean include( RepositoryMetadata metadata, long timestamp )
+ {
+ return true;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.filter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+/**
+ * Ability to filter repository metadata lists.
+ *
+ * @todo should be in maven-artifact
+ */
+public interface MetadataFilter
+{
+ /**
+ * Whether to include this metadata in the filtered list.
+ *
+ * @param metadata the metadata
+ * @param timestamp the time to compare against; the metadata is included if no existing record is found or the record is outdated
+ * @return whether to include it
+ */
+ boolean include( RepositoryMetadata metadata, long timestamp );
+}
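// Illustrative sketch, not part of this patch: a minimal MetadataFilter implementation
// demonstrating the include( metadata, timestamp ) contract described above. The class
// name and cut-off value are hypothetical; it assumes the same package and imports as
// the filters introduced in this change.
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;

public class CutoffMetadataFilter
    implements MetadataFilter
{
    private final long cutoff;

    public CutoffMetadataFilter( long cutoff )
    {
        this.cutoff = cutoff;
    }

    public boolean include( RepositoryMetadata metadata, long timestamp )
    {
        // include only metadata whose file was modified at or after the cut-off time
        return timestamp >= cutoff;
    }
}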
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-indexer</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.maven.archiva</groupId>
+ <artifactId>archiva-discoverer</artifactId>
+ </dependency>
</dependencies>
<build>
<plugins>
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
}
else
{
- String lastUpdated = metadata.getMetadata().getVersioning().getLastUpdated();
- if ( lastUpdated == null || lastUpdated.length() == 0 )
+ Versioning versioning = metadata.getMetadata().getVersioning();
+ boolean found = false;
+ if ( versioning != null )
+ {
+ String lastUpdated = versioning.getLastUpdated();
+ if ( lastUpdated != null && lastUpdated.length() != 0 )
+ {
+ found = true;
+ }
+ }
+ if ( !found )
{
reporter.addFailure( metadata, "Missing lastUpdated element inside the metadata." );
}
repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
Versioning versioning = metadata.getMetadata().getVersioning();
- for ( Iterator versions = versioning.getVersions().iterator(); versions.hasNext(); )
+ if ( versioning != null )
{
- String version = (String) versions.next();
+ for ( Iterator versions = versioning.getVersions().iterator(); versions.hasNext(); )
+ {
+ String version = (String) versions.next();
- Artifact artifact =
- artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+ Artifact artifact =
+ artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
- if ( !repositoryQueryLayer.containsArtifact( artifact ) )
- {
- reporter.addFailure( metadata, "Artifact version " + version + " is present in metadata but " +
- "missing in the repository." );
+ if ( !repositoryQueryLayer.containsArtifact( artifact ) )
+ {
+ reporter.addFailure( metadata, "Artifact version " + version + " is present in metadata but " +
+ "missing in the repository." );
+ }
}
}
}
throws IOException
{
Versioning versioning = metadata.getMetadata().getVersioning();
+ List metadataVersions = versioning != null ? versioning.getVersions() : Collections.EMPTY_LIST;
File versionsDir =
new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
List versions = FileUtils.getFileNames( versionsDir, "*/*.pom", null, false );
{
File path = new File( (String) i.next() );
String version = path.getParentFile().getName();
- if ( !versioning.getVersions().contains( version ) )
+ if ( !metadataVersions.contains( version ) )
{
reporter.addFailure( metadata, "Artifact version " + version + " found in the repository but " +
"missing in the metadata." );
public void addFailure( RepositoryMetadata metadata, String reason )
{
- MetadataResults results = getMetadataResults( metadata );
+ MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
results.addFailure( createResults( reason ) );
totalFailures++;
}
public void addWarning( RepositoryMetadata metadata, String reason )
{
- MetadataResults results = getMetadataResults( metadata );
+ MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
results.addWarning( createResults( reason ) );
totalWarnings++;
}
- private MetadataResults getMetadataResults( RepositoryMetadata metadata )
- {
- Map metadataMap = getMetadataMap();
-
- String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
-
- MetadataResults results = (MetadataResults) metadataMap.get( key );
- if ( results == null )
- {
- results = new MetadataResults();
- results.setArtifactId( metadata.getArtifactId() );
- results.setGroupId( metadata.getGroupId() );
- results.setVersion( metadata.getBaseVersion() );
-
- metadataMap.put( key, results );
- reporting.getMetadata().add( results );
- }
-
- return results;
- }
-
private Map getMetadataMap()
{
if ( metadataMap == null )
{
return reporting.getMetadata().iterator();
}
+
+ public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp )
+ {
+ String key = getMetadataKey( metadata );
+ Map map = getMetadataMap();
+ MetadataResults results = (MetadataResults) map.get( key );
+ return results != null && results.getLastModified() >= timestamp;
+ }
+
+ /**
+ * Make sure the metadata record exists, but remove any previous reports in preparation for adding new ones.
+ *
+ * @param metadata the metadata
+ * @param lastModified the modification time of the file being tracked
+ */
+ public void cleanMetadata( RepositoryMetadata metadata, long lastModified )
+ {
+ MetadataResults results = getMetadataResults( metadata, lastModified );
+
+ results.setLastModified( lastModified );
+ results.getFailures().clear();
+ results.getWarnings().clear();
+ }
+
+ private MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified )
+ {
+ String key = getMetadataKey( metadata );
+ Map metadataMap = getMetadataMap();
+ MetadataResults results = (MetadataResults) metadataMap.get( key );
+ if ( results == null )
+ {
+ results = new MetadataResults();
+ results.setArtifactId( metadata.getArtifactId() );
+ results.setGroupId( metadata.getGroupId() );
+ results.setVersion( metadata.getBaseVersion() );
+ results.setLastModified( lastModified );
+
+ metadataMap.put( key, results );
+ reporting.getMetadata().add( results );
+ }
+ return results;
+ }
+
+ private static String getMetadataKey( RepositoryMetadata metadata )
+ {
+ return getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
+ }
+
+ public void removeArtifact( Artifact artifact )
+ {
+ Map map = getArtifactMap();
+
+ String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
+ artifact.getType(), artifact.getClassifier() );
+ ArtifactResults results = (ArtifactResults) map.get( key );
+ if ( results != null )
+ {
+ for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
+ {
+ if ( results.equals( i.next() ) )
+ {
+ i.remove();
+ }
+ }
+ map.remove( key );
+ }
+ }
}
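// Illustrative sketch, not part of this patch: how the new isMetadataUpToDate() and
// cleanMetadata() methods are intended to fit together during a scan. The method and
// variable names are hypothetical; the calls mirror the executor changes above and
// assume reportingStore and repository fields are available there.
private void processMetadataIfStale( RepositoryMetadata repositoryMetadata, File metadataFile,
                                     ReportingDatabase reporter )
    throws ReportingStoreException
{
    long lastModified = metadataFile.lastModified();
    if ( !reporter.isMetadataUpToDate( repositoryMetadata, lastModified ) )
    {
        // clear any stale results for this metadata and record the new timestamp
        reporter.cleanMetadata( repositoryMetadata, lastModified );

        // ... run the metadata report processors, which add failures and warnings ...

        // store intermittently so a later crash does not lose these results
        reportingStore.storeReports( reporter, repository );
    }
}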
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+/**
+ * Metadata filter backed by the reporting database. Metadata whose reports are already up to date is excluded.
+ */
+public class ReportingMetadataFilter
+ implements MetadataFilter
+{
+ private final ReportingDatabase reporter;
+
+ public ReportingMetadataFilter( ReportingDatabase reporter )
+ {
+ this.reporter = reporter;
+ }
+
+ public boolean include( RepositoryMetadata metadata, long timestamp )
+ {
+ return !reporter.isMetadataUpToDate( metadata, timestamp );
+ }
+}
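// Illustrative usage sketch, not part of this patch: the executor change earlier in this
// diff wires the reporting database in as the discovery filter, so metadata whose reports
// are already up to date is skipped during discovery.
MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter );
List metadata = metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter );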
The version of the metadata in the result.
</description>
</field>
+ <field xml.attribute="true">
+ <name>lastModified</name>
+ <version>1.0.0</version>
+ <type>long</type>
+ <identity>true</identity>
+ <description>
+ The time that the metadata was last modified.
+ </description>
+ </field>
</fields>
</class>
<class>
assertFalse( "check no more failures", failures.hasNext() );
}
+ public void testMetadataMissingVersioning()
+ {
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, null );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getMetadataIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ assertMetadata( metadata, results );
+ Result result = (Result) failures.next();
+ assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
+ result = (Result) failures.next();
+ assertEquals( "check reason",
+ "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
+ result.getReason() );
+ result = (Result) failures.next();
+ assertEquals( "check reason",
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
+ result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
public void testMetadataValidVersions()
{
Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );