import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.conversion.LegacyRepositoryConverter;
import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
import org.apache.maven.archiva.discoverer.DiscovererException;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.tools.cli.AbstractCli;
try
{
legacyRepositoryConverter.convertLegacyRepository( oldRepositoryPath, newRepositoryPath,
- blacklistedPatterns, true );
+ true );
}
catch ( RepositoryConversionException e )
{
import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.Model;
import org.apache.maven.model.Relocation;
-import org.apache.maven.model.converter.ArtifactPomRewriter;
import org.apache.maven.model.converter.ModelConverter;
import org.apache.maven.model.converter.PomTranslationException;
import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
/**
* @plexus.requirement
*/
- private ArtifactPomRewriter rewriter;
+ private ModelConverter translator;
/**
* @plexus.requirement
*/
- private ModelConverter translator;
-
+ private ArtifactHandlerManager artifactHandlerManager;
+
/**
* @plexus.requirement
*/
- private ArtifactHandlerManager artifactHandlerManager;
+ private ReportingDatabase reportingDatabase;
/**
* @plexus.configuration default-value="false"
*/
private I18N i18n;
- public void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ public void convert( Artifact artifact, ArtifactRepository targetRepository )
throws RepositoryConversionException
{
if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) );
}
- if ( validateMetadata( artifact, reporter ) )
+ if ( validateMetadata( artifact ) )
{
FileTransaction transaction = new FileTransaction();
- if ( copyPom( artifact, targetRepository, reporter, transaction ) )
+ if ( copyPom( artifact, targetRepository, transaction ) )
{
- if ( copyArtifact( artifact, targetRepository, reporter, transaction ) )
+ if ( copyArtifact( artifact, targetRepository, transaction ) )
{
Metadata metadata = createBaseMetadata( artifact );
Versioning versioning = new Versioning();
return metadata;
}
- private boolean validateMetadata( Artifact artifact, ReportingDatabase reporter )
+ private boolean validateMetadata( Artifact artifact )
throws RepositoryConversionException
{
ArtifactRepository repository = artifact.getRepository();
if ( file.exists() )
{
Metadata metadata = readMetadata( file );
- result = validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+ result = validateMetadata( metadata, repositoryMetadata, artifact );
}
repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
if ( file.exists() )
{
Metadata metadata = readMetadata( file );
- result = result && validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+ result = result && validateMetadata( metadata, repositoryMetadata, artifact );
}
return result;
}
- private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
- ReportingDatabase reporter )
+ private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact )
{
String groupIdKey;
String artifactIdKey = null;
if ( metadata.getGroupId() == null || !metadata.getGroupId().equals( artifact.getGroupId() ) )
{
- addFailure( reporter, artifact, groupIdKey );
+ addFailure( artifact, groupIdKey );
result = false;
}
if ( !repositoryMetadata.storedInGroupDirectory() )
{
if ( metadata.getArtifactId() == null || !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
{
- addFailure( reporter, artifact, artifactIdKey );
+ addFailure( artifact, artifactIdKey );
result = false;
}
if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
if ( !foundVersion )
{
- addFailure( reporter, artifact, versionsKey );
+ addFailure( artifact, versionsKey );
result = false;
}
}
// snapshot metadata
if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
{
- addFailure( reporter, artifact, versionKey );
+ addFailure( artifact, versionKey );
result = false;
}
if ( !correct )
{
- addFailure( reporter, artifact, snapshotKey );
+ addFailure( artifact, snapshotKey );
result = false;
}
}
return result;
}
- private void addFailure( ReportingDatabase reporter, Artifact artifact, String key )
+ private void addFailure( Artifact artifact, String key )
{
- addFailureWithReason( reporter, artifact, getI18NString( key ) );
+ addFailureWithReason( artifact, getI18NString( key ) );
}
- private static void addWarning( ReportingDatabase reporter, Artifact artifact, String message )
+ private void addWarning( Artifact artifact, String message )
{
// TODO: should we be able to identify/fix these?
- reporter.addWarning( artifact, null, null, message );
+ reportingDatabase.getArtifactDatabase().addWarning( artifact, null, null, message );
}
- private static void addFailureWithReason( ReportingDatabase reporter, Artifact artifact, String reason )
+ private void addFailureWithReason( Artifact artifact, String reason )
{
// TODO: should we be able to identify/fix these?
- reporter.addFailure( artifact, null, null, reason );
+ reportingDatabase.getArtifactDatabase().addFailure( artifact, null, null, reason );
}
- private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
- FileTransaction transaction )
+ private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
throws RepositoryConversionException
{
Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(),
boolean checksumsValid = false;
try
{
- if ( testChecksums( artifact, file, reporter ) )
+ if ( testChecksums( artifact, file ) )
{
checksumsValid = true;
}
for ( Iterator i = warnings.iterator(); i.hasNext(); )
{
String message = (String) i.next();
- addWarning( reporter, artifact, message );
+ addWarning( artifact, message );
}
}
catch ( XmlPullParserException e )
{
- addFailureWithReason( reporter, artifact,
+ addFailureWithReason( artifact,
getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
result = false;
}
}
catch ( PomTranslationException e )
{
- addFailureWithReason( reporter, artifact,
+ addFailureWithReason( artifact,
getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
result = false;
}
}
else
{
- addWarning( reporter, artifact, getI18NString( "warning.missing.pom" ) );
+ addWarning( artifact, getI18NString( "warning.missing.pom" ) );
}
return result;
}
return i18n.getString( getClass().getName(), Locale.getDefault(), key );
}
- private boolean testChecksums( Artifact artifact, File file, ReportingDatabase reporter )
+ private boolean testChecksums( Artifact artifact, File file )
throws IOException
{
boolean result = true;
{
Digester digester = (Digester) it.next();
result &= verifyChecksum( file, file.getName() + "." + getDigesterFileExtension( digester ), digester,
- reporter, artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
+ artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
}
return result;
}
return digester.getAlgorithm().toLowerCase().replaceAll( "-", "" );
}
- private boolean verifyChecksum( File file, String fileName, Digester digester, ReportingDatabase reporter,
- Artifact artifact, String key )
+ private boolean verifyChecksum( File file, String fileName, Digester digester, Artifact artifact,
+ String key )
throws IOException
{
boolean result = true;
}
catch ( DigesterException e )
{
- addFailure( reporter, artifact, key );
+ addFailure( artifact, key );
result = false;
}
}
return result;
}
- private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
- FileTransaction transaction )
+ private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
throws RepositoryConversionException
{
File sourceFile = artifact.getFile();
matching = FileUtils.contentEquals( sourceFile, targetFile );
if ( !matching )
{
- addFailure( reporter, artifact, "failure.target.already.exists" );
+ addFailure( artifact, "failure.target.already.exists" );
result = false;
}
}
{
if ( force || !matching )
{
- if ( testChecksums( artifact, sourceFile, reporter ) )
+ if ( testChecksums( artifact, sourceFile ) )
{
transaction.copyFile( sourceFile, targetFile, digesters );
}
return result;
}
- public void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ public void convert( List artifacts, ArtifactRepository targetRepository )
throws RepositoryConversionException
{
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
try
{
- convert( artifact, targetRepository, reporter );
+ convert( artifact, targetRepository );
}
catch ( RepositoryConversionException e )
{
// the stack trace would be useful. I also have no idea what a processor is currently or
// how to get hold of it here.
- reporter.addFailure( artifact, "", e.getLocalizedMessage(), e.getCause().getLocalizedMessage() );
+ reportingDatabase.getArtifactDatabase().addFailure( artifact, "", e.getLocalizedMessage(),
+ e.getCause().getLocalizedMessage() );
}
}
}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
*
* @param artifact the artifact to convert
* @param targetRepository the target repository
- * @param reporter reporter to track the results of the conversion
*/
- void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ void convert( Artifact artifact, ArtifactRepository targetRepository )
throws RepositoryConversionException;
/**
*
* @param artifacts the set of artifacts to convert
* @param targetRepository the target repository
- * @param reporter reporter to track the results of the conversions
*/
- void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ void convert( List artifacts, ArtifactRepository targetRepository )
throws RepositoryConversionException;
}
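With the reporter parameter removed, callers no longer thread a ReportingDatabase through this API; the converter records warnings and failures via its own injected database. A minimal sketch of the updated call site (hypothetical variable names, assuming the converter is obtained as a Plexus requirement):

    try
    {
        repositoryConverter.convert( artifacts, targetRepository );
    }
    catch ( RepositoryConversionException e )
    {
        // Individual artifact failures are recorded in the converter's own
        // ReportingDatabase; the checked exception remains on the signature
        // for conversion-level errors.
    }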
--- /dev/null
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.archiva.discoverer.DiscovererConsumerFactory;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Jason van Zyl
+ * @plexus.component
+ * @todo turn this into a general conversion component and hide all this crap here.
+ * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
+ */
+public class DefaultLegacyRepositoryConverter
+ implements LegacyRepositoryConverter
+{
+ /**
+ * @plexus.requirement role-hint="legacy"
+ */
+ private ArtifactRepositoryLayout legacyLayout;
+
+ /**
+ * @plexus.requirement role-hint="default"
+ */
+ private ArtifactRepositoryLayout defaultLayout;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactRepositoryFactory artifactRepositoryFactory;
+
+ /**
+ * @plexus.requirement role-hint="default"
+ */
+ private Discoverer discoverer;
+
+ /**
+ * @plexus.requirement
+ */
+ private DiscovererConsumerFactory consumerFactory;
+
+ public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
+ boolean includeSnapshots )
+ throws RepositoryConversionException, DiscovererException
+ {
+ ArtifactRepository legacyRepository;
+
+ ArtifactRepository repository;
+
+ try
+ {
+ legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy", legacyRepositoryDirectory
+ .toURI().toURL().toString(), legacyLayout, null, null );
+
+ repository = artifactRepositoryFactory.createArtifactRepository( "default", repositoryDirectory.toURI()
+ .toURL().toString(), defaultLayout, null, null );
+ }
+ catch ( MalformedURLException e )
+ {
+ throw new RepositoryConversionException( "Error convering legacy repository.", e );
+ }
+
+ List consumers = new ArrayList();
+
+ LegacyConverterArtifactConsumer consumer = (LegacyConverterArtifactConsumer) consumerFactory
+ .createConsumer( "legacy-converter" );
+ consumer.setDestinationRepository( repository );
+
+ consumers.add( consumer );
+
+ discoverer.walkRepository( legacyRepository, consumers, includeSnapshots );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.converter.RepositoryConverter;
+import org.apache.maven.archiva.discoverer.consumers.GenericArtifactConsumer;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+
+/**
+ * LegacyConverterArtifactConsumer - convert artifacts as they are found
+ * into the destination repository.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ * role-hint="legacy-converter"
+ * instantiation-strategy="per-lookup"
+ */
+public class LegacyConverterArtifactConsumer
+ extends GenericArtifactConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryConverter repositoryConverter;
+
+ private ArtifactRepository destinationRepository;
+
+ public void processArtifact( Artifact artifact, File file )
+ {
+ try
+ {
+ repositoryConverter.convert( artifact, destinationRepository );
+ }
+ catch ( RepositoryConversionException e )
+ {
+ getLogger().error(
+ "Unable to convert artifact " + artifact + " to destination repository "
+ + destinationRepository, e );
+ }
+ }
+
+ public void processArtifactBuildFailure( File path, String message )
+ {
+ getLogger().error( "Artifact Build Failure on " + path + " : " + message );
+ }
+
+ public ArtifactRepository getDestinationRepository()
+ {
+ return destinationRepository;
+ }
+
+ public void setDestinationRepository( ArtifactRepository destinationRepository )
+ {
+ this.destinationRepository = destinationRepository;
+ }
+}
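Because the consumer is declared with instantiation-strategy="per-lookup", each instance created through the factory must be configured with its destination repository before the walk begins, exactly as DefaultLegacyRepositoryConverter does above. A condensed sketch of that wiring (using java.util.Collections for the one-element consumer list):

    LegacyConverterArtifactConsumer consumer =
        (LegacyConverterArtifactConsumer) consumerFactory.createConsumer( "legacy-converter" );
    consumer.setDestinationRepository( repository );

    discoverer.walkRepository( legacyRepository, Collections.singletonList( consumer ), includeSnapshots );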
--- /dev/null
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+
+import java.io.File;
+
+/**
+ * @author Jason van Zyl
+ */
+public interface LegacyRepositoryConverter
+{
+ String ROLE = LegacyRepositoryConverter.class.getName();
+
+ /**
+ * Convert a legacy repository to a modern repository. This means converting a Maven 1.x
+ * repository using v3 POMs into a Maven 2.x repository using v4.0.0 POMs.
+ *
+ * @param legacyRepositoryDirectory the directory of the legacy (Maven 1.x) repository to read from
+ * @param repositoryDirectory the directory of the target (Maven 2.x) repository to write to
+ * @param includeSnapshots whether snapshot artifacts should be converted as well
+ * @throws org.apache.maven.archiva.converter.RepositoryConversionException
+ *
+ */
+ void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, boolean includeSnapshots )
+ throws RepositoryConversionException, DiscovererException;
+}
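A minimal sketch of driving this interface from application code (the container reference and repository paths are hypothetical; the actual CLI invocation appears at the top of this change):

    LegacyRepositoryConverter converter =
        (LegacyRepositoryConverter) container.lookup( LegacyRepositoryConverter.ROLE );

    // third argument: whether snapshot artifacts are converted too
    converter.convertLegacyRepository( new File( "/repos/legacy" ), new File( "/repos/default" ), true );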
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
i18n = (I18N) lookup( I18N.ROLE );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
}
private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
artifactFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
assertTrue( "Check artifact created", artifactFile.exists() );
targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
//checkSuccess(); --> commented until MNG-2100 is fixed
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
assertEquals( "check number of warnings", 2, reportingDatabase.getNumWarnings() );
assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0", "maven-plugin" );
artifact.setFile(
new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
// There is a warning but I can't figure out how to look at it. Eyeballing the results it appears
// the plugin is being converted correctly.
//checkSuccess();
targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
// test that a POM is not created when there was none at the source
Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
assertEquals( "check warnings", 1, reportingDatabase.getNumWarnings() );
assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() );
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkFailure();
assertEquals( "check failure message", getI18nString( "failure.incorrect.md5" ), getFailure().getReason() );
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkFailure();
assertEquals( "check failure message", getI18nString( "failure.incorrect.sha1" ), getFailure().getReason() );
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
compareFiles( sourceFile, targetFile );
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkFailure();
assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
getFailure().getReason() );
sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() );
sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
compareFiles( sourceFile, targetFile );
File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
assertTrue( "Check source file exists", sourceFile.exists() );
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkFailure();
assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
getFailure().getReason() );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkFailure();
String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$";
assertTrue( "Check failure message", getFailure().getReason().matches( pattern ) );
artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) );
artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) );
artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) );
- repositoryConverter.convert( artifacts, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifacts, targetRepository );
assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkFailure();
assertEquals( "check failure message", getI18nString( "failure.incorrect.artifactMetadata.versions" ),
getFailure().getReason() );
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkFailure();
assertEquals( "check failure message", getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ),
getFailure().getReason() );
Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
checkSuccess();
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
try
{
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
fail( "Should have failed trying to convert within the same repository" );
}
catch ( RepositoryConversionException e )
/**
* ManagedArtifact
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class ManagedArtifact
/**
* ManagedArtifactTypes - provides place to test an unknown artifact type.
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class ManagedArtifactTypes
/**
* ManagedEjbArtifact - adds the ability to reference the ejb-client jar too.
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class ManagedEjbArtifact
* ManagedJavaArtifact - a ManagedArtifact with optional javadoc and source
* reference jars.
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class ManagedJavaArtifact
--- /dev/null
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.InvalidArtifactRTException;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
+
+import java.io.File;
+import java.util.Collections;
+
+/**
+ * ArtifactHealthConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ * role-hint="artifact-health"
+ * instantiation-strategy="per-lookup"
+ */
+public class ArtifactHealthConsumer
+ extends GenericArtifactConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
+ /**
+ * @plexus.requirement role-hint="health"
+ */
+ private ReportGroup health;
+
+ /**
+ * @plexus.requirement
+ */
+ private MavenProjectBuilder projectBuilder;
+
+ public void processArtifact( Artifact artifact, File file )
+ {
+ Model model = null;
+ try
+ {
+ Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact
+ .getArtifactId(), artifact.getVersion() );
+ MavenProject project = projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
+
+ model = project.getModel();
+ }
+ catch ( InvalidArtifactRTException e )
+ {
+ database.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
+ }
+ catch ( ProjectBuildingException e )
+ {
+ database.addWarning( artifact, null, null, "Error reading project model: " + e );
+ }
+
+ database.remove( artifact );
+ health.processArtifact( artifact, model );
+ }
+
+ public void processArtifactBuildFailure( File path, String message )
+ {
+ /* do nothing here (yet) */
+ // TODO: store build failure into database?
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.discoverer.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+
+/**
+ * IndexArtifactConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ * role-hint="index-artifact"
+ * instantiation-strategy="per-lookup"
+ */
+public class IndexArtifactConsumer
+ extends GenericArtifactConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryArtifactIndexFactory indexFactory;
+
+ /**
+ * @plexus.requirement role-hint="standard"
+ */
+ private RepositoryIndexRecordFactory recordFactory;
+
+ /**
+ * Configuration store.
+ *
+ * @plexus.requirement
+ */
+ private ArchivaConfiguration archivaConfiguration;
+
+ private RepositoryArtifactIndex index;
+
+ public boolean init( ArtifactRepository repository )
+ {
+ Configuration configuration = archivaConfiguration.getConfiguration();
+
+ File indexPath = new File( configuration.getIndexPath() );
+
+ index = indexFactory.createStandardIndex( indexPath );
+
+ return super.init( repository );
+ }
+
+ public void processArtifact( Artifact artifact, File file )
+ {
+ try
+ {
+ index.indexArtifact( artifact, recordFactory );
+ }
+ catch ( RepositoryIndexException e )
+ {
+ getLogger().warn( "Unable to index artifact " + artifact, e );
+ }
+ }
+
+ public void processArtifactBuildFailure( File path, String message )
+ {
+ // TODO: record or log artifact build failures (currently a no-op for indexing)
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.consumers.GenericRepositoryMetadataConsumer;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.io.File;
+
+/**
+ * RepositoryMetadataHealthConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ * role-hint="metadata-health"
+ * instantiation-strategy="per-lookup"
+ */
+public class RepositoryMetadataHealthConsumer
+ extends GenericRepositoryMetadataConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResultsDatabase database;
+
+ /**
+ * @plexus.requirement role-hint="health"
+ */
+ private ReportGroup health;
+
+ public void processRepositoryMetadata( RepositoryMetadata metadata, File file )
+ {
+ MetadataResults results = database.getMetadataResults( metadata );
+ database.clearResults( results );
+
+ health.processMetadata( metadata, repository );
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.conversion;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.converter.RepositoryConverter;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStore;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- * @plexus.component
- * @todo turn this into a general conversion component and hide all this crap here.
- * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
- */
-public class DefaultLegacyRepositoryConverter
- implements LegacyRepositoryConverter
-{
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private ArtifactDiscoverer artifactDiscoverer;
-
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private ArtifactRepositoryLayout legacyLayout;
-
- /**
- * @plexus.requirement role-hint="default"
- */
- private ArtifactRepositoryLayout defaultLayout;
-
- /**
- * @plexus.requirement
- */
- private ArtifactRepositoryFactory artifactRepositoryFactory;
-
- /**
- * @plexus.requirement
- */
- private RepositoryConverter repositoryConverter;
-
- /**
- * @plexus.requirement
- */
- private ReportingStore reportingStore;
-
- /**
- * @plexus.requirement role-hint="health"
- */
- private ReportGroup reportGroup;
-
- public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
- List blacklistedPatterns, boolean includeSnapshots )
- throws RepositoryConversionException, DiscovererException
- {
- ArtifactRepository legacyRepository;
-
- ArtifactRepository repository;
-
- try
- {
- legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy",
- legacyRepositoryDirectory.toURI().toURL().toString(),
- legacyLayout, null, null );
-
- repository = artifactRepositoryFactory.createArtifactRepository( "default",
- repositoryDirectory.toURI().toURL().toString(),
- defaultLayout, null, null );
- }
- catch ( MalformedURLException e )
- {
- throw new RepositoryConversionException( "Error convering legacy repository.", e );
- }
-
- ArtifactFilter filter =
- includeSnapshots ? new AcceptAllArtifactFilter() : (ArtifactFilter) new SnapshotArtifactFilter();
- List legacyArtifacts = artifactDiscoverer.discoverArtifacts( legacyRepository, blacklistedPatterns, filter );
-
- ReportingDatabase reporter;
- try
- {
- reporter = reportingStore.getReportsFromStore( repository, reportGroup );
-
- repositoryConverter.convert( legacyArtifacts, repository, reporter );
-
- reportingStore.storeReports( reporter, repository );
- }
- catch ( ReportingStoreException e )
- {
- throw new RepositoryConversionException( "Error convering legacy repository.", e );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.conversion;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- */
-public interface LegacyRepositoryConverter
-{
- String ROLE = LegacyRepositoryConverter.class.getName();
-
- /**
- * Convert a legacy repository to a modern repository. This means a Maven 1.x repository
- * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs.
- *
- * @param legacyRepositoryDirectory
- * @param repositoryDirectory
- * @throws org.apache.maven.archiva.converter.RepositoryConversionException
- *
- */
- void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, List blacklistedPatterns,
- boolean includeSnapshots )
- throws RepositoryConversionException, DiscovererException;
-}
/**
* ActiveManagedRepositories
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public interface ActiveManagedRepositories
/**
* DefaultActiveManagedRepositories
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
* @plexus.component role="org.apache.maven.archiva.repositories.ActiveManagedRepositories"
*/
import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
import org.apache.maven.archiva.scheduler.task.RepositoryTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
private Scheduler scheduler;
/**
- * @plexus.requirement role-hint="indexer"
+ * @plexus.requirement role-hint="data-refresh"
*/
- private TaskQueue indexerQueue;
-
- /**
- * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
- */
- private IndexerTaskExecutor indexerTaskExecutor;
+ private TaskQueue datarefreshQueue;
/**
* @plexus.requirement
JobDetail jobDetail = new JobDetail( jobName, DISCOVERER_GROUP, RepositoryTaskJob.class );
JobDataMap dataMap = new JobDataMap();
- dataMap.put( RepositoryTaskJob.TASK_QUEUE, indexerQueue );
+ dataMap.put( RepositoryTaskJob.TASK_QUEUE, datarefreshQueue );
dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, RepositoryTask.QUEUE_POLICY_SKIP );
jobDetail.setJobDataMap( dataMap );
}
}
- public void runIndexer()
+ public void runDataRefresh()
throws org.apache.maven.archiva.scheduler.TaskExecutionException
{
- IndexerTask task = new IndexerTask();
- task.setJobName( "INDEX_INIT" );
+ DataRefreshTask task = new DataRefreshTask();
+ task.setJobName( "DATA_REFRESH_INIT" );
try
{
- indexerQueue.put( task );
+ datarefreshQueue.put( task );
}
catch ( TaskQueueException e )
{
RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
if ( !artifactIndex.exists() )
{
- runIndexer();
+ runDataRefresh();
}
}
catch ( RepositoryIndexException e )
* under the License.
*/
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
import org.apache.maven.archiva.scheduler.task.RepositoryTask;
import org.codehaus.plexus.scheduler.AbstractJob;
import org.codehaus.plexus.taskqueue.TaskQueue;
JobDataMap dataMap = context.getJobDetail().getJobDataMap();
setJobDataMap( dataMap );
- TaskQueue indexerQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
+ TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
- RepositoryTask task = new IndexerTask();
+ RepositoryTask task = new DataRefreshTask();
task.setJobName( context.getJobDetail().getName() );
try
{
- if ( indexerQueue.getQueueSnapshot().size() == 0 )
+ if ( taskQueue.getQueueSnapshot().size() == 0 )
{
- indexerQueue.put( task );
+ taskQueue.put( task );
}
else
{
if ( RepositoryTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) )
{
- indexerQueue.put( task );
+ taskQueue.put( task );
}
else if ( RepositoryTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) )
{
*/
String ROLE = RepositoryTaskScheduler.class.getName();
- void runIndexer()
+ void runDataRefresh()
throws TaskExecutionException;
}
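A minimal sketch of kicking off the renamed operation from a component that holds the scheduler (the container reference is hypothetical; the scheduler also invokes this itself when no index exists yet, as shown above):

    RepositoryTaskScheduler taskScheduler =
        (RepositoryTaskScheduler) container.lookup( RepositoryTaskScheduler.ROLE );

    // queues a DataRefreshTask named "DATA_REFRESH_INIT" on the data-refresh queue
    taskScheduler.runDataRefresh();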
--- /dev/null
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererConsumerFactory;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.taskqueue.Task;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * DataRefreshExecutor
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
+ * role-hint="data-refresh"
+ */
+public class DataRefreshExecutor
+ extends AbstractLogEnabled
+ implements TaskExecutor
+{
+ /**
+ * Configuration store.
+ *
+ * @plexus.requirement
+ */
+ private ArchivaConfiguration archivaConfiguration;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfiguredRepositoryFactory repoFactory;
+
+ /**
+ * @plexus.configuration default-value="index-artifact"
+ */
+ private List consumerNames;
+
+ /**
+ * @plexus.requirement
+ */
+ private Discoverer discoverer;
+
+ /**
+ * @plexus.requirement
+ */
+ private DiscovererConsumerFactory consumerFactory;
+
+ public void executeTask( Task task )
+ throws TaskExecutionException
+ {
+ DataRefreshTask indexerTask = (DataRefreshTask) task;
+
+ getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
+
+ execute();
+ }
+
+ private String toHumanTimestamp( long timestamp )
+ {
+ SimpleDateFormat dateFormat = new SimpleDateFormat();
+ return dateFormat.format( new Date( timestamp ) );
+ }
+
+ public void execute()
+ throws TaskExecutionException
+ {
+ Configuration configuration = archivaConfiguration.getConfiguration();
+
+ List consumers = new ArrayList();
+
+ for ( Iterator it = consumerNames.iterator(); it.hasNext(); )
+ {
+ String name = (String) it.next();
+ try
+ {
+ DiscovererConsumer consumer = consumerFactory.createConsumer( name );
+ consumers.add( consumer );
+ }
+ catch ( DiscovererException e )
+ {
+ throw new TaskExecutionException( e.getMessage(), e );
+ }
+ }
+
+ long time = System.currentTimeMillis();
+
+ for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
+ {
+ RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
+
+ if ( !repositoryConfiguration.isIndexed() )
+ {
+ continue;
+ }
+
+ ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
+
+ List filteredConsumers = filterConsumers( consumers, repository );
+
+ DiscovererStatistics stats = discoverer.scanRepository( repository, filteredConsumers,
+ repositoryConfiguration.isIncludeSnapshots() );
+
+ getLogger().info( "----------------------------------------------------" );
+ getLogger().info( "Scan of Repository: " + repository.getId() );
+ getLogger().info( " Started : " + toHumanTimestamp( stats.getTimestampStarted() ) );
+ getLogger().info( " Finished: " + toHumanTimestamp( stats.getTimestampFinished() ) );
+ // TODO: pretty print elapsed time.
+ getLogger().info( " Duration: " + stats.getElapsedMilliseconds() + "ms" );
+ getLogger().info( " Files : " + stats.getFilesIncluded() );
+ getLogger().info( " Consumed: " + stats.getFilesConsumed() );
+ getLogger().info( " Skipped : " + stats.getFilesSkipped() );
+ }
+
+ time = System.currentTimeMillis() - time;
+
+ getLogger().info( "Finished data refresh process in " + time + "ms." );
+ }
+
+ /**
+ * Not all consumers work with all repositories.
+ * This will filter out those incompatible consumers based on the provided repository.
+ *
+ * @param consumers the initial list of consumers.
+ * @param repository the repository to test consumer against.
+ * @return the filtered list of consumers.
+ */
+ private List filterConsumers( List consumers, ArtifactRepository repository )
+ {
+ List filtered = new ArrayList();
+
+ for ( Iterator it = consumers.iterator(); it.hasNext(); )
+ {
+ DiscovererConsumer consumer = (DiscovererConsumer) it.next();
+ if ( consumer.init( repository ) )
+ {
+ // Approved!
+ filtered.add( consumer );
+ }
+ else
+ {
+ getLogger().info( "Disabling consumer [" + consumer.getName() + "] for repository " + repository );
+ }
+ }
+
+ return filtered;
+ }
+
+}
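Since filterConsumers() drops any consumer whose init() returns false, a consumer can opt out of repositories it cannot handle. A sketch under the GenericArtifactConsumer contract used by the consumers in this change (the layout check is a hypothetical example of such a compatibility test):

    import org.apache.maven.archiva.discoverer.consumers.GenericArtifactConsumer;
    import org.apache.maven.artifact.Artifact;
    import org.apache.maven.artifact.repository.ArtifactRepository;
    import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;

    import java.io.File;

    public class DefaultLayoutOnlyConsumer
        extends GenericArtifactConsumer
    {
        public boolean init( ArtifactRepository repository )
        {
            // Hypothetical compatibility test: only accept "default" layout repositories.
            if ( !( repository.getLayout() instanceof DefaultRepositoryLayout ) )
            {
                return false; // the executor logs and disables this consumer
            }
            return super.init( repository );
        }

        public void processArtifact( Artifact artifact, File file )
        {
            // ... consume the artifact ...
        }

        public void processArtifactBuildFailure( File path, String message )
        {
            // ... record the failure ...
        }
    }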
+++ /dev/null
-package org.apache.maven.archiva.scheduler.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.archiva.configuration.RepositoryConfiguration;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.executor.ReportExecutor;
-import org.apache.maven.archiva.reporting.filter.ReportingMetadataFilter;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.taskqueue.Task;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Edwin Punzalan
- * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
- */
-public class IndexerTaskExecutor
- extends AbstractLogEnabled
- implements TaskExecutor
-{
- /**
- * Configuration store.
- *
- * @plexus.requirement
- */
- private ArchivaConfiguration archivaConfiguration;
-
- /**
- * @plexus.requirement
- */
- private RepositoryArtifactIndexFactory indexFactory;
-
- /**
- * @plexus.requirement
- */
- private ConfiguredRepositoryFactory repoFactory;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
- */
- private Map artifactDiscoverers;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
- */
- private Map metadataDiscoverers;
-
- /**
- * @plexus.requirement role-hint="standard"
- */
- private RepositoryIndexRecordFactory recordFactory;
-
- /**
- * @plexus.requirement
- */
- private ReportExecutor reportExecutor;
-
- /**
- * @plexus.requirement role-hint="health"
- */
- private ReportGroup reportGroup;
-
- private long lastIndexingTime = 0;
-
- private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
- public long getLastIndexingTime()
- {
- return lastIndexingTime;
- }
-
- public void executeTask( Task task )
- throws TaskExecutionException
- {
- IndexerTask indexerTask = (IndexerTask) task;
-
- getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
-
- execute();
- }
-
- public void execute()
- throws TaskExecutionException
- {
- Configuration configuration = archivaConfiguration.getConfiguration();
-
- File indexPath = new File( configuration.getIndexPath() );
-
- execute( configuration, indexPath );
- }
-
- public void executeNowIfNeeded()
- throws TaskExecutionException
- {
- Configuration configuration = archivaConfiguration.getConfiguration();
-
- File indexPath = new File( configuration.getIndexPath() );
-
- try
- {
- RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
- if ( !artifactIndex.exists() )
- {
- execute( configuration, indexPath );
- }
- }
- catch ( RepositoryIndexException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- }
-
- private void execute( Configuration configuration, File indexPath )
- throws TaskExecutionException
- {
- long time = System.currentTimeMillis();
- getLogger().info( "Starting repository indexing process" );
-
- RepositoryArtifactIndex index = indexFactory.createStandardIndex( indexPath );
-
- try
- {
- Collection keys;
- if ( index.exists() )
- {
- keys = index.getAllRecordKeys();
- }
- else
- {
- keys = Collections.EMPTY_LIST;
- }
-
- for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
- {
- RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
- if ( repositoryConfiguration.isIndexed() )
- {
- List blacklistedPatterns = new ArrayList();
- if ( repositoryConfiguration.getBlackListPatterns() != null )
- {
- blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() );
- }
- if ( configuration.getGlobalBlackListPatterns() != null )
- {
- blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() );
- }
- boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots();
-
- ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
- ReportingDatabase reporter = reportExecutor.getReportDatabase( repository, reportGroup );
-
- // keep original value in case there is another process under way
- long origStartTime = reporter.getStartTime();
- reporter.setStartTime( System.currentTimeMillis() );
-
- // Discovery process
- String layoutProperty = repositoryConfiguration.getLayout();
- ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
- AndArtifactFilter filter = new AndArtifactFilter();
- filter.add( new IndexRecordExistsArtifactFilter( keys ) );
- if ( !includeSnapshots )
- {
- filter.add( new SnapshotArtifactFilter() );
- }
-
- // Save some memory by not tracking paths we won't use
- // TODO: Plexus CDC should be able to inject this configuration
- discoverer.setTrackOmittedPaths( false );
-
- getLogger().info( "Searching repository " + repositoryConfiguration.getName() );
- List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
- if ( !artifacts.isEmpty() )
- {
- getLogger().info( "Discovered " + artifacts.size() + " unindexed artifacts" );
-
- // Work through these in batches, then flush the project cache.
- for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
- {
- int end = j + ARTIFACT_BUFFER_SIZE;
- List currentArtifacts =
- artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
- // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
- // run the reports. Done intermittently to avoid losing track of what is indexed since
- // that is what the filter is based on.
- reportExecutor.runArtifactReports( reportGroup, currentArtifacts, repository );
-
- index.indexArtifacts( currentArtifacts, recordFactory );
-
- // MRM-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
- // around that. TODO: remove when it is configurable
- flushProjectBuilderCacheHack();
- }
- }
-
- MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter );
-
- MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers
- .get( layoutProperty );
- List metadata =
- metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter );
-
- if ( !metadata.isEmpty() )
- {
- getLogger().info( "Discovered " + metadata.size() + " unprocessed metadata files" );
-
- // run the reports
- reportExecutor.runMetadataReports( reportGroup, metadata, repository );
- }
-
- reporter.setStartTime( origStartTime );
- }
- }
- }
- catch ( RepositoryIndexException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- catch ( DiscovererException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- catch ( ReportingStoreException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
-
- time = System.currentTimeMillis() - time;
- lastIndexingTime = System.currentTimeMillis();
- getLogger().info( "Finished repository indexing process in " + time + "ms" );
- }
-
- /**
- * @todo remove when no longer needed (MRM-142)
- * @plexus.requirement
- */
- private MavenProjectBuilder projectBuilder;
-
- private void flushProjectBuilderCacheHack()
- {
- try
- {
- if ( projectBuilder != null )
- {
- getLogger().info( "projectBuilder is type " + projectBuilder.getClass().getName() );
-
- java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
- f.setAccessible( true );
- Map cache = (Map) f.get( projectBuilder );
- getLogger().info( "projectBuilder.raw is type " + cache.getClass().getName() );
- cache.clear();
-
- f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
- f.setAccessible( true );
- cache = (Map) f.get( projectBuilder );
- getLogger().info( "projectBuilder.processed is type " + cache.getClass().getName() );
- cache.clear();
- }
- }
- catch ( NoSuchFieldException e )
- {
- throw new RuntimeException( e );
- }
- catch ( IllegalAccessException e )
- {
- throw new RuntimeException( e );
- }
- }
-}
--- /dev/null
+package org.apache.maven.archiva.scheduler.task;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * DataRefreshTask - task for discovering changes in the repository
+ * and updating all associated data.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DataRefreshTask
+ implements RepositoryTask
+{
+ private String jobName;
+
+ private String policy;
+
+ public String getJobName()
+ {
+ return jobName;
+ }
+
+ public String getQueuePolicy()
+ {
+ return policy;
+ }
+
+ public void setJobName( String jobName )
+ {
+ this.jobName = jobName;
+ }
+
+ public void setQueuePolicy( String policy )
+ {
+ this.policy = policy;
+ }
+
+ public long getMaxExecutionTime()
+ {
+ return 0;
+ }
+}
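
As a usage sketch only (not part of this change set): queuing a DataRefreshTask onto a Plexus task queue. The "data-refresh" role-hint and the queue-policy string here are assumptions for illustration.

import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.TaskQueueException;

// Illustrative sketch, assuming a TaskQueue component is registered
// under the role-hint "data-refresh"; not part of this change set.
public class DataRefreshQueueingExample
{
    /**
     * @plexus.requirement role-hint="data-refresh"
     */
    private TaskQueue dataRefreshQueue;

    public void queueDataRefresh( String jobName )
        throws TaskQueueException
    {
        DataRefreshTask task = new DataRefreshTask();
        task.setJobName( jobName );
        // Policy string is illustrative; real values are defined by RepositoryTask.
        task.setQueuePolicy( "skip" );
        dataRefreshQueue.put( task );
    }
}
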
+++ /dev/null
-package org.apache.maven.archiva.scheduler.task;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Task for discovering changes in the repository and updating the index accordingly.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class IndexerTask
- implements RepositoryTask
-{
- private String jobName;
-
- private String policy;
-
- public long getMaxExecutionTime()
- {
- return 0;
- }
-
- public String getJobName()
- {
- return jobName;
- }
-
- public String getQueuePolicy()
- {
- return policy;
- }
-
- public void setQueuePolicy( String policy )
- {
- this.policy = policy;
- }
-
- public void setJobName( String jobName )
- {
- this.jobName = jobName;
- }
-
-
-}
* under the License.
*/
-import org.apache.maven.archiva.conversion.LegacyRepositoryConverter;
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
import org.codehaus.plexus.PlexusTestCase;
import java.io.File;
LegacyRepositoryConverter rm = (LegacyRepositoryConverter) lookup( LegacyRepositoryConverter.ROLE );
- rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, null, true );
+ rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, true );
}
}
/**
* DefaultActiveManagedRepositoriesTest
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class DefaultActiveManagedRepositoriesTest
--- /dev/null
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.io.File;
+
+/**
+ * DataRefreshExecutorTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DataRefreshExecutorTest
+ extends PlexusTestCase
+{
+ private TaskExecutor taskExecutor;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "data-refresh" );
+
+ ArchivaConfiguration archivaConfiguration =
+ (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
+ Configuration configuration = archivaConfiguration.getConfiguration();
+
+ File indexPath = new File( configuration.getIndexPath() );
+ if ( indexPath.exists() )
+ {
+ FileUtils.deleteDirectory( indexPath );
+ }
+ }
+
+ public void testDataRefresh()
+ throws TaskExecutionException
+ {
+ taskExecutor.executeTask( new TestDataRefreshTask() );
+ }
+
+ class TestDataRefreshTask
+ extends DataRefreshTask
+ {
+ public String getJobName()
+ {
+ return "TestDataRefresh";
+ }
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.scheduler.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.File;
-
-/**
- * IndexerTaskExecutorTest
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class IndexerTaskExecutorTest
- extends PlexusTestCase
-{
- private TaskExecutor taskExecutor;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "indexer" );
-
- ArchivaConfiguration archivaConfiguration =
- (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
- Configuration configuration = archivaConfiguration.getConfiguration();
-
- File indexPath = new File( configuration.getIndexPath() );
- if ( indexPath.exists() )
- {
- FileUtils.deleteDirectory( indexPath );
- }
- }
-
- public void testIndexer()
- throws TaskExecutionException
- {
- taskExecutor.executeTask( new TestIndexerTask() );
- }
-
- class TestIndexerTask
- extends IndexerTask
- {
- public String getJobName()
- {
- return "TestIndexer";
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Base class for artifact discoverers.
- *
- * @author John Casey
- * @author Brett Porter
- */
-public abstract class AbstractArtifactDiscoverer
- extends AbstractDiscoverer
- implements ArtifactDiscoverer
-{
- /**
- * Standard patterns to exclude from discovery as they are not artifacts.
- */
- private static final String[] STANDARD_DISCOVERY_EXCLUDES = {"bin/**", "reports/**", ".index", ".reports/**",
- ".maven/**", "**/*.md5", "**/*.MD5", "**/*.sha1", "**/*.SHA1", "**/*snapshot-version", "*/website/**",
- "*/licenses/**", "*/licences/**", "**/.htaccess", "**/*.html", "**/*.asc", "**/*.txt", "**/*.xml", "**/README*",
- "**/CHANGELOG*", "**/KEYS*"};
-
- private List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns )
- {
- return scanForArtifactPaths( repositoryBase, blacklistedPatterns, null, STANDARD_DISCOVERY_EXCLUDES );
- }
-
- public List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter )
- throws DiscovererException
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
- }
-
- File repositoryBase = new File( repository.getBasedir() );
-
- List artifacts = new ArrayList();
-
- List artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns );
-
- for ( Iterator i = artifactPaths.iterator(); i.hasNext(); )
- {
- String path = (String) i.next();
-
- try
- {
- Artifact artifact = buildArtifactFromPath( path, repository );
-
- if ( filter.include( artifact ) )
- {
- artifacts.add( artifact );
- }
- else
- {
- addExcludedPath( path, "Omitted by filter" );
- }
- }
- catch ( DiscovererException e )
- {
- addKickedOutPath( path, e.getMessage() );
- }
- }
-
- return artifacts;
- }
-
- /**
- * Returns an artifact object that is represented by the specified path in a repository
- *
- * @param path The path that is pointing to an artifact
- * @param repository The repository of the artifact
- * @return Artifact
- * @throws DiscovererException when the specified path does correspond to an artifact
- */
- public Artifact buildArtifactFromPath( String path, ArtifactRepository repository )
- throws DiscovererException
- {
- Artifact artifact = buildArtifact( path );
-
- if ( artifact != null )
- {
- artifact.setRepository( repository );
- artifact.setFile( new File( repository.getBasedir(), path ) );
- }
-
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.DirectoryScanner;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Base class for the artifact and metadata discoverers.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractDiscoverer
- extends AbstractLogEnabled
- implements Discoverer
-{
- private List kickedOutPaths = new ArrayList();
-
- /**
- * @plexus.requirement
- */
- protected ArtifactFactory artifactFactory;
-
- private static final String[] EMPTY_STRING_ARRAY = new String[0];
-
- private List excludedPaths = new ArrayList();
-
- /**
- * @plexus.configuration default-value="true"
- */
- private boolean trackOmittedPaths;
-
- /**
- * Add a path to the list of files that were kicked out due to being invalid.
- *
- * @param path the path to add
- * @param reason the reason why the path is being kicked out
- */
- protected void addKickedOutPath( String path, String reason )
- {
- if ( trackOmittedPaths )
- {
- kickedOutPaths.add( new DiscovererPath( path, reason ) );
- }
- }
-
- /**
- * Add a path to the list of files that were excluded.
- *
- * @param path the path to add
- * @param reason the reason why the path is excluded
- */
- protected void addExcludedPath( String path, String reason )
- {
- excludedPaths.add( new DiscovererPath( path, reason ) );
- }
-
- /**
- * Returns an iterator for the list if DiscovererPaths that were found to not represent a searched object
- *
- * @return Iterator for the DiscovererPath List
- */
- public Iterator getKickedOutPathsIterator()
- {
- assert trackOmittedPaths;
- return kickedOutPaths.iterator();
- }
-
- protected List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns, String[] includes,
- String[] excludes )
- {
- List allExcludes = new ArrayList();
- allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
- if ( excludes != null )
- {
- allExcludes.addAll( Arrays.asList( excludes ) );
- }
- if ( blacklistedPatterns != null )
- {
- allExcludes.addAll( blacklistedPatterns );
- }
-
- DirectoryScanner scanner = new DirectoryScanner();
-
- scanner.setBasedir( repositoryBase );
-
- if ( includes != null )
- {
- scanner.setIncludes( includes );
- }
- scanner.setExcludes( (String[]) allExcludes.toArray( EMPTY_STRING_ARRAY ) );
-
- // TODO: Correct for extremely large repositories (artifact counts over 200,000 entries)
- scanner.scan();
-
- if ( trackOmittedPaths )
- {
- for ( Iterator files = Arrays.asList( scanner.getExcludedFiles() ).iterator(); files.hasNext(); )
- {
- String path = files.next().toString();
-
- excludedPaths.add( new DiscovererPath( path, "Artifact was in the specified list of exclusions" ) );
- }
- }
-
- // TODO: this could be a part of the scanner
- List includedPaths = new ArrayList();
- for ( Iterator files = Arrays.asList( scanner.getIncludedFiles() ).iterator(); files.hasNext(); )
- {
- String path = files.next().toString();
-
- includedPaths.add( path );
- }
-
- return includedPaths;
- }
-
- /**
- * Returns an iterator for the list if DiscovererPaths that were not processed because they are explicitly excluded
- *
- * @return Iterator for the DiscovererPath List
- */
- public Iterator getExcludedPathsIterator()
- {
- assert trackOmittedPaths;
- return excludedPaths.iterator();
- }
-
- public void setTrackOmittedPaths( boolean trackOmittedPaths )
- {
- this.trackOmittedPaths = trackOmittedPaths;
- }
-}
*/
import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.List;
/**
* Interface for implementation that can discover artifacts within a repository.
*
* @author John Casey
* @author Brett Porter
- * @todo do we want blacklisted patterns in another form? Part of the object construction?
- * @todo should includeSnapshots be configuration on the component? If not, should the methods be changed to include alternates for both possibilities (discoverReleaseArtifacts, discoverReleaseAndSnapshotArtifacts)?
- * @todo instead of a returned list, should a listener be passed in?
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
*/
public interface ArtifactDiscoverer
extends Discoverer
{
String ROLE = ArtifactDiscoverer.class.getName();
- /**
- * Discover artifacts in the repository. Only artifacts added since the last attempt at discovery will be found.
- * This process guarantees never to miss an artifact, however it is possible that an artifact will be received twice
- * consecutively even if unchanged, so any users of this list must handle such a situation gracefully.
- *
- * @param repository the location of the repository
- * @param blacklistedPatterns pattern that lists any files to prevent from being included when scanning
- * @param filter filter for artifacts to include in the discovered list
- * @return the list of artifacts discovered
- * @throws DiscovererException if there was an unrecoverable problem discovering artifacts or recording progress
- */
- List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter )
- throws DiscovererException;
-
/**
* Build an artifact from a path in the repository
*
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.artifact.Artifact;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the new repository layout (Maven 2.0+).
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="default"
- */
-public class DefaultArtifactDiscoverer
- extends AbstractArtifactDiscoverer
-{
- /**
- * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
- */
- public Artifact buildArtifact( String path )
- throws DiscovererException
- {
- List pathParts = new ArrayList();
- StringTokenizer st = new StringTokenizer( path, "/\\" );
- while ( st.hasMoreTokens() )
- {
- pathParts.add( st.nextToken() );
- }
-
- Collections.reverse( pathParts );
-
- Artifact artifact;
- if ( pathParts.size() >= 4 )
- {
- // maven 2.x path
-
- // the actual artifact filename.
- String filename = (String) pathParts.remove( 0 );
-
- // the next one is the version.
- String version = (String) pathParts.remove( 0 );
-
- // the next one is the artifactId.
- String artifactId = (String) pathParts.remove( 0 );
-
- // the remaining are the groupId.
- Collections.reverse( pathParts );
- String groupId = StringUtils.join( pathParts.iterator(), "." );
-
- String remainingFilename = filename;
- if ( remainingFilename.startsWith( artifactId + "-" ) )
- {
- remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
-
- String classifier = null;
-
- // TODO: use artifact handler, share with legacy discoverer
- String type;
- if ( remainingFilename.endsWith( ".tar.gz" ) )
- {
- type = "distribution-tgz";
- remainingFilename =
- remainingFilename.substring( 0, remainingFilename.length() - ".tar.gz".length() );
- }
- else if ( remainingFilename.endsWith( ".zip" ) )
- {
- type = "distribution-zip";
- remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
- }
- else if ( remainingFilename.endsWith( "-test-sources.jar" ) )
- {
- type = "java-source";
- classifier = "test-sources";
- remainingFilename =
- remainingFilename.substring( 0, remainingFilename.length() - "-test-sources.jar".length() );
- }
- else if ( remainingFilename.endsWith( "-sources.jar" ) )
- {
- type = "java-source";
- classifier = "sources";
- remainingFilename =
- remainingFilename.substring( 0, remainingFilename.length() - "-sources.jar".length() );
- }
- else
- {
- int index = remainingFilename.lastIndexOf( "." );
- if ( index >= 0 )
- {
- type = remainingFilename.substring( index + 1 );
- remainingFilename = remainingFilename.substring( 0, index );
- }
- else
- {
- throw new DiscovererException( "Path filename does not have an extension" );
- }
- }
-
- Artifact result;
- if ( classifier == null )
- {
- result =
- artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
- }
- else
- {
- result =
- artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
- }
-
- if ( result.isSnapshot() )
- {
- // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
- int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
- if ( classifierIndex >= 0 )
- {
- classifier = remainingFilename.substring( classifierIndex + 1 );
- remainingFilename = remainingFilename.substring( 0, classifierIndex );
- result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
- type, classifier );
- }
- else
- {
- result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
- Artifact.SCOPE_RUNTIME, type );
- }
-
- // poor encapsulation requires we do this to populate base version
- if ( !result.isSnapshot() )
- {
- throw new DiscovererException( "Failed to create a snapshot artifact: " + result );
- }
- else if ( !result.getBaseVersion().equals( version ) )
- {
- throw new DiscovererException(
- "Built snapshot artifact base version does not match path version: " + result +
- "; should have been version: " + version );
- }
- else
- {
- artifact = result;
- }
- }
- else if ( !remainingFilename.startsWith( version ) )
- {
- throw new DiscovererException( "Built artifact version does not match path version" );
- }
- else if ( !remainingFilename.equals( version ) )
- {
- if ( remainingFilename.charAt( version.length() ) == '-' )
- {
- classifier = remainingFilename.substring( version.length() + 1 );
- artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
- classifier );
- }
- else
- {
- throw new DiscovererException( "Path version does not corresspond to an artifact version" );
- }
- }
- else
- {
- artifact = result;
- }
- }
- else
- {
- throw new DiscovererException( "Path filename does not correspond to an artifact" );
- }
- }
- else
- {
- throw new DiscovererException( "Path is too short to build an artifact from" );
- }
-
- return artifact;
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.DirectoryWalker;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Default Discoverer implementation, driven by a plexus-utils DirectoryWalker.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @plexus.component role="org.apache.maven.archiva.discoverer.Discoverer"
+ * role-hint="default"
+ */
+public class DefaultDiscoverer
+ extends AbstractLogEnabled
+ implements Discoverer
+{
+ /**
+ * Standard patterns to exclude from discovery as they are usually noise.
+ */
+ private static final String[] STANDARD_DISCOVERY_EXCLUDES = {
+ "bin/**",
+ "reports/**",
+ ".index",
+ ".reports/**",
+ ".maven/**",
+ "**/*snapshot-version",
+ "*/website/**",
+ "*/licences/**",
+ "**/.htaccess",
+ "**/*.html",
+ "**/*.txt",
+ "**/README*",
+ "**/CHANGELOG*",
+ "**/KEYS*" };
+
+ public DefaultDiscoverer()
+ {
+ }
+
+ public DiscovererStatistics scanRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots )
+ {
+ return walkRepository( repository, consumers, includeSnapshots, true );
+ }
+
+ public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots )
+ {
+ return walkRepository( repository, consumers, includeSnapshots, false );
+ }
+
+ private DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers,
+ boolean includeSnapshots, boolean checkLastModified )
+ {
+ // Sanity Check
+
+ if ( repository == null )
+ {
+ throw new IllegalArgumentException( "Unable to operate on a null repository." );
+ }
+
+ if ( !"file".equals( repository.getProtocol() ) )
+ {
+ throw new UnsupportedOperationException( "Only filesystem repositories are supported." );
+ }
+
+ File repositoryBase = new File( repository.getBasedir() );
+
+ if ( !repositoryBase.exists() )
+ {
+ throw new UnsupportedOperationException( "Unable to scan a repository, directory "
+ + repositoryBase.getAbsolutePath() + " does not exist." );
+ }
+
+ if ( !repositoryBase.isDirectory() )
+ {
+ throw new UnsupportedOperationException( "Unable to scan a repository, path "
+ + repositoryBase.getAbsolutePath() + " is not a directory." );
+ }
+
+ // Setup Includes / Excludes.
+
+ List allExcludes = new ArrayList();
+ List allIncludes = new ArrayList();
+
+ // Exclude all of the SCM patterns.
+ allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
+
+ // Exclude all of the archiva noise patterns.
+ allExcludes.addAll( Arrays.asList( STANDARD_DISCOVERY_EXCLUDES ) );
+
+ if ( !includeSnapshots )
+ {
+ allExcludes.add( "**/*-SNAPSHOT*" );
+ }
+
+ Iterator it = consumers.iterator();
+ while ( it.hasNext() )
+ {
+ DiscovererConsumer consumer = (DiscovererConsumer) it.next();
+
+ // TODO Disabled, until I can find a better way to do this that doesn't clobber other consumers. - joakime
+ // addUniqueElements( consumer.getExcludePatterns(), allExcludes );
+ addUniqueElements( consumer.getIncludePatterns(), allIncludes );
+ }
+
+ // Setup Directory Walker
+
+ DirectoryWalker dirWalker = new DirectoryWalker();
+
+ dirWalker.setBaseDir( repositoryBase );
+
+ dirWalker.setIncludes( allIncludes );
+ dirWalker.setExcludes( allExcludes );
+
+ // Setup the Scan Instance
+
+ RepositoryScanner repoScanner = new RepositoryScanner( repository, consumers );
+ repoScanner.setCheckLastModified( checkLastModified );
+
+ repoScanner.setLogger( getLogger() );
+ dirWalker.addDirectoryWalkListener( repoScanner );
+
+ // Execute scan.
+ dirWalker.scan();
+
+ return repoScanner.getStatistics();
+ }
+
+ private void addUniqueElements( List fromList, List toList )
+ {
+ Iterator itFrom = fromList.iterator();
+ while ( itFrom.hasNext() )
+ {
+ Object o = itFrom.next();
+ if ( !toList.contains( o ) )
+ {
+ toList.add( o );
+ }
+ }
+ }
+}
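
As a usage sketch only: how a caller might drive the new consumer-based API, contrasting the incremental scan with the full walk. The PomCountingConsumer is a hypothetical implementation (sketched after the DiscovererConsumer interface below); the discoverer and repository are assumed to be supplied by the caller, e.g. from a Plexus lookup and ConfiguredRepositoryFactory.

import org.apache.maven.artifact.repository.ArtifactRepository;

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch, assuming the same package as Discoverer and
// DiscovererStatistics; not part of this change set.
public class DiscovererUsageExample
{
    public void scanAndWalk( Discoverer discoverer, ArtifactRepository repository )
    {
        List consumers = new ArrayList();
        consumers.add( new PomCountingConsumer() ); // hypothetical consumer

        // Incremental: only content changed since the last recorded scan.
        DiscovererStatistics scanStats = discoverer.scanRepository( repository, consumers, false );
        System.out.println( "scan consumed " + scanStats.getFilesConsumed() + " changed files" );

        // Full: every included file, regardless of change.
        DiscovererStatistics walkStats = discoverer.walkRepository( repository, consumers, false );
        System.out.println( "walk consumed " + walkStats.getFilesConsumed()
            + " of " + walkStats.getFilesIncluded() + " files in "
            + walkStats.getElapsedMilliseconds() + "ms" );
    }
}
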
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * This class gets all the paths that contain the metadata files.
- *
- * @plexus.component role="org.apache.maven.archiva.discoverer.MetadataDiscoverer" role-hint="default"
- */
-public class DefaultMetadataDiscoverer
- extends AbstractDiscoverer
- implements MetadataDiscoverer
-{
- /**
- * Standard patterns to include in discovery of metadata files.
- *
- * @todo Note that only the remote format is supported at this time: you cannot search local repository metadata due
- * to the way it is later loaded in the searchers. Review code using pathOfRemoteMetadata. IS there any value in
- * searching the local metadata in the first place though?
- */
- private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"};
-
- public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter filter )
- throws DiscovererException
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
- }
-
- List metadataFiles = new ArrayList();
- List metadataPaths = scanForArtifactPaths( new File( repository.getBasedir() ), blacklistedPatterns,
- STANDARD_DISCOVERY_INCLUDES, null );
-
- for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
- {
- String metadataPath = (String) i.next();
- try
- {
- RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), metadataPath );
- File f = new File( repository.getBasedir(), metadataPath );
- if ( filter.include( metadata, f.lastModified() ) )
- {
- metadataFiles.add( metadata );
- }
- else
- {
- addExcludedPath( metadataPath, "Metadata excluded by filter" );
- }
- }
- catch ( DiscovererException e )
- {
- addKickedOutPath( metadataPath, e.getMessage() );
- }
- }
-
- return metadataFiles;
- }
-
- public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
- throws DiscovererException
- {
- return discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
- }
-
- private RepositoryMetadata buildMetadata( String repo, String metadataPath )
- throws DiscovererException
- {
- Metadata m;
- File f = new File( repo, metadataPath );
- try
- {
- Reader reader = new FileReader( f );
- MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
-
- m = metadataReader.read( reader );
- }
- catch ( XmlPullParserException e )
- {
- throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
- }
- catch ( IOException e )
- {
- throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
- }
-
- RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
-
- if ( repositoryMetadata == null )
- {
- throw new DiscovererException( "Unable to build a repository metadata from path" );
- }
-
- return repositoryMetadata;
- }
-
- /**
- * Builds a RepositoryMetadata object from a Metadata object and its path.
- *
- * @param m Metadata
- * @param metadataPath path
- * @return RepositoryMetadata if the parameters represent one; null if not
- * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc)
- */
- private RepositoryMetadata buildMetadata( Metadata m, String metadataPath )
- {
- String metaGroupId = m.getGroupId();
- String metaArtifactId = m.getArtifactId();
- String metaVersion = m.getVersion();
-
- // check if the groupId, artifactId and version is in the
- // metadataPath
- // parse the path, in reverse order
- List pathParts = new ArrayList();
- StringTokenizer st = new StringTokenizer( metadataPath, "/\\" );
- while ( st.hasMoreTokens() )
- {
- pathParts.add( st.nextToken() );
- }
-
- Collections.reverse( pathParts );
- // remove the metadata file
- pathParts.remove( 0 );
- Iterator it = pathParts.iterator();
- String tmpDir = (String) it.next();
-
- Artifact artifact = null;
- if ( StringUtils.isNotEmpty( metaVersion ) )
- {
- artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
- }
-
- // snapshotMetadata
- RepositoryMetadata metadata = null;
- if ( tmpDir != null && tmpDir.equals( metaVersion ) )
- {
- if ( artifact != null )
- {
- metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- }
- }
- else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
- {
- // artifactMetadata
- if ( artifact != null )
- {
- metadata = new ArtifactRepositoryMetadata( artifact );
- }
- else
- {
- artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
- metadata = new ArtifactRepositoryMetadata( artifact );
- }
- }
- else
- {
- String groupDir = "";
- int ctr = 0;
- for ( it = pathParts.iterator(); it.hasNext(); )
- {
- String path = (String) it.next();
- if ( ctr == 0 )
- {
- groupDir = path;
- }
- else
- {
- groupDir = path + "." + groupDir;
- }
- ctr++;
- }
-
- // groupMetadata
- if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
- {
- metadata = new GroupRepositoryMetadata( metaGroupId );
- }
- }
-
- return metadata;
- }
-}
* under the License.
*/
-import java.util.Iterator;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.List;
/**
- * @author Edwin Punzalan
+ * Discoverer - generic discoverer of content in an ArtifactRepository.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
*/
public interface Discoverer
{
/**
- * Get the list of paths kicked out during the discovery process.
- *
- * @return the paths as Strings.
+ * Scan the repository for content changed since the last scan,
+ * reporting each change to the appropriate consumer.
+ *
+ * @param repository the repository to scan.
+ * @param consumers the list of consumers to notify of discovered files.
+ * @param includeSnapshots true to include snapshots in the scanning of this repository.
+ * @return the statistics for this scan.
*/
- Iterator getKickedOutPathsIterator();
-
+ public DiscovererStatistics scanRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots );
+
/**
- * Get the list of paths excluded during the discovery process.
- *
- * @return the paths as Strings.
+ * Walk the entire repository, regardless of change,
+ * reporting every included file to the appropriate consumer.
+ *
+ * @param repository the repository to walk.
+ * @param consumers the list of consumers to notify of discovered files.
+ * @param includeSnapshots true to include snapshots in the walking of this repository.
+ * @return the statistics for this scan.
*/
- Iterator getExcludedPathsIterator();
-
- void setTrackOmittedPaths( boolean trackOmittedPaths );
+ public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots );
}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * DiscovererConsumer
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public interface DiscovererConsumer
+{
+ public static final String ROLE = DiscovererConsumer.class.getName();
+
+ public String getName();
+
+ public boolean init( ArtifactRepository repository );
+
+ public List getExcludePatterns();
+
+ public List getIncludePatterns();
+
+ public void processFile( File file ) throws DiscovererException;
+}
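
To make the consumer contract concrete, a minimal hypothetical implementation (not part of this change set) that counts POM files; the semantics assumed for init()'s boolean return are flagged in the comments.

import org.apache.maven.artifact.repository.ArtifactRepository;

import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * PomCountingConsumer - illustrative sketch only, assuming the same
 * package as DiscovererConsumer; counts every POM file the walker hands it.
 */
public class PomCountingConsumer
    implements DiscovererConsumer
{
    private int pomCount = 0;

    public String getName()
    {
        return "pom-counter";
    }

    public boolean init( ArtifactRepository repository )
    {
        pomCount = 0;
        // Assumption: returning true signals this consumer is ready for the repository.
        return true;
    }

    public List getExcludePatterns()
    {
        return Collections.EMPTY_LIST;
    }

    public List getIncludePatterns()
    {
        return Arrays.asList( new String[] { "**/*.pom" } );
    }

    public void processFile( File file )
        throws DiscovererException
    {
        pomCount++;
    }

    public int getPomCount()
    {
        return pomCount;
    }
}
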
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.codehaus.plexus.PlexusConstants;
+import org.codehaus.plexus.PlexusContainer;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+import org.codehaus.plexus.context.Context;
+import org.codehaus.plexus.context.ContextException;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable;
+
+/**
+ * DiscovererConsumerFactory - factory for consumers.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumerFactory"
+ */
+public class DiscovererConsumerFactory
+ implements Contextualizable
+{
+ private PlexusContainer container;
+
+ public DiscovererConsumer createConsumer( String name )
+ throws DiscovererException
+ {
+ DiscovererConsumer consumer;
+ try
+ {
+ consumer = (DiscovererConsumer) container.lookup( DiscovererConsumer.ROLE, name );
+ }
+ catch ( ComponentLookupException e )
+ {
+ throw new DiscovererException( "Unable to create consumer [" + name + "]: " + e.getMessage(), e );
+ }
+
+ return consumer;
+ }
+
+ public void contextualize( Context context )
+ throws ContextException
+ {
+ container = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY );
+ }
+}
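
A brief usage sketch for the factory; the "pom-counter" role-hint is assumed to match a registered DiscovererConsumer component and is illustrative only.

// Illustrative sketch, assuming the same package; not part of this change set.
public class ConsumerFactoryUsageExample
{
    public DiscovererConsumer lookupPomCounter( DiscovererConsumerFactory factory )
        throws DiscovererException
    {
        // createConsumer() delegates to the Plexus container, so the name must
        // match a @plexus.component role-hint on a DiscovererConsumer implementation.
        return factory.createConsumer( "pom-counter" );
    }
}
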
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class DiscovererPath
-{
- /**
- * The path discovered.
- */
- private final String path;
-
- /**
- * A comment about why the path is being processed.
- */
- private final String comment;
-
- public DiscovererPath( String path, String comment )
- {
- this.path = path;
- this.comment = comment;
- }
-
- public String getPath()
- {
- return path;
- }
-
- public String getComment()
- {
- return comment;
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.math.NumberUtils;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.util.IOUtil;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.Properties;
+
+/**
+ * DiscovererStatistics
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DiscovererStatistics
+{
+ public static final String STATS_FILENAME = ".stats";
+
+ private static final String PROP_FILES_CONSUMED = "scan.consumed.files";
+
+ private static final String PROP_FILES_INCLUDED = "scan.included.files";
+
+ private static final String PROP_FILES_SKIPPED = "scan.skipped.files";
+
+ private static final String PROP_TIMESTAMP_STARTED = "scan.started.timestamp";
+
+ private static final String PROP_TIMESTAMP_FINISHED = "scan.finished.timestamp";
+
+ protected long timestampStarted = 0;
+
+ protected long timestampFinished = 0;
+
+ protected long filesIncluded = 0;
+
+ protected long filesConsumed = 0;
+
+ protected long filesSkipped = 0;
+
+ private ArtifactRepository repository;
+
+ public DiscovererStatistics( ArtifactRepository repository )
+ {
+ this.repository = repository;
+ load();
+ }
+
+ public void load()
+ {
+ File repositoryBase = new File( this.repository.getBasedir() );
+
+ File scanProperties = new File( repositoryBase, STATS_FILENAME );
+ FileInputStream fis = null;
+ try
+ {
+ Properties props = new Properties();
+ fis = new FileInputStream( scanProperties );
+ props.load( fis );
+
+ timestampFinished = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_FINISHED ), 0 );
+ timestampStarted = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_STARTED ), 0 );
+ filesIncluded = NumberUtils.toLong( props.getProperty( PROP_FILES_INCLUDED ), 0 );
+ filesConsumed = NumberUtils.toLong( props.getProperty( PROP_FILES_CONSUMED ), 0 );
+ filesSkipped = NumberUtils.toLong( props.getProperty( PROP_FILES_SKIPPED ), 0 );
+ }
+ catch ( IOException e )
+ {
+ reset();
+ }
+ finally
+ {
+ IOUtil.close( fis );
+ }
+ }
+
+ public void save()
+ throws DiscovererException
+ {
+ Properties props = new Properties();
+ props.setProperty( PROP_TIMESTAMP_FINISHED, String.valueOf( timestampFinished ) );
+ props.setProperty( PROP_TIMESTAMP_STARTED, String.valueOf( timestampStarted ) );
+ props.setProperty( PROP_FILES_INCLUDED, String.valueOf( filesIncluded ) );
+ props.setProperty( PROP_FILES_CONSUMED, String.valueOf( filesConsumed ) );
+ props.setProperty( PROP_FILES_SKIPPED, String.valueOf( filesSkipped ) );
+
+ File repositoryBase = new File( this.repository.getBasedir() );
+ File statsFile = new File( repositoryBase, STATS_FILENAME );
+
+ FileOutputStream fos = null;
+ try
+ {
+ fos = new FileOutputStream( statsFile );
+ props.store( fos, "Last Scan Information, managed by Archiva. DO NOT EDIT" );
+ fos.flush();
+ }
+ catch ( IOException e )
+ {
+ throw new DiscovererException( "Unable to write scan stats to file " + statsFile.getAbsolutePath() + ": "
+ + e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( fos );
+ }
+ }
+
+ public void reset()
+ {
+ timestampStarted = 0;
+ timestampFinished = 0;
+ filesIncluded = 0;
+ filesConsumed = 0;
+ filesSkipped = 0;
+ }
+
+ public long getElapsedMilliseconds()
+ {
+ return timestampFinished - timestampStarted;
+ }
+
+ public long getFilesConsumed()
+ {
+ return filesConsumed;
+ }
+
+ public long getFilesIncluded()
+ {
+ return filesIncluded;
+ }
+
+ public ArtifactRepository getRepository()
+ {
+ return repository;
+ }
+
+ public long getTimestampFinished()
+ {
+ return timestampFinished;
+ }
+
+ public long getTimestampStarted()
+ {
+ return timestampStarted;
+ }
+
+ public long getFilesSkipped()
+ {
+ return filesSkipped;
+ }
+
+ public void setTimestampFinished( long timestampFinished )
+ {
+ this.timestampFinished = timestampFinished;
+ }
+
+ public void setTimestampStarted( long timestampStarted )
+ {
+ this.timestampStarted = timestampStarted;
+ }
+}
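
For reference, save() persists these counters as a standard java.util.Properties file named ".stats" at the repository root, which load() reads back on construction. A sample of the resulting file (the key names and header comment come from the code above; the values are invented for illustration):

#Last Scan Information, managed by Archiva. DO NOT EDIT
scan.started.timestamp=1171454400000
scan.finished.timestamp=1171454461000
scan.included.files=1523
scan.consumed.files=1500
scan.skipped.files=23
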
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the legacy repository layout (Maven 1.x).
- * Method used to build an artifact object using a relative path from a repository base directory. An artifactId
- * having the words "DEV", "PRE", "RC", "ALPHA", "BETA", "DEBUG", "UNOFFICIAL", "CURRENT", "LATEST", "FCS",
- * "RELEASE", "NIGHTLY", "SNAPSHOT" and "TEST" (not case-sensitive) will most likely make this method fail as
- * they are reserved for version usage.
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="legacy"
- */
-public class LegacyArtifactDiscoverer
- extends AbstractArtifactDiscoverer
-{
- /**
- * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
- */
- public Artifact buildArtifact( String path )
- throws DiscovererException
- {
- StringTokenizer tokens = new StringTokenizer( path, "/\\" );
-
- Artifact result;
-
- int numberOfTokens = tokens.countTokens();
-
- if ( numberOfTokens == 3 )
- {
- String groupId = tokens.nextToken();
-
- String type = tokens.nextToken();
-
- if ( type.endsWith( "s" ) )
- {
- type = type.substring( 0, type.length() - 1 );
-
- // contains artifactId, version, classifier, and extension.
- String avceGlob = tokens.nextToken();
-
- //noinspection CollectionDeclaredAsConcreteClass
- LinkedList avceTokenList = new LinkedList();
-
- StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
- while ( avceTokenizer.hasMoreTokens() )
- {
- avceTokenList.addLast( avceTokenizer.nextToken() );
- }
-
- String lastAvceToken = (String) avceTokenList.removeLast();
-
- // TODO: share with other discoverer, use artifact handlers instead
- if ( lastAvceToken.endsWith( ".tar.gz" ) )
- {
- type = "distribution-tgz";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else if ( lastAvceToken.endsWith( "sources.jar" ) )
- {
- type = "java-source";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else if ( lastAvceToken.endsWith( "javadoc.jar" ) )
- {
- type = "javadoc.jar";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else if ( lastAvceToken.endsWith( ".zip" ) )
- {
- type = "distribution-zip";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else
- {
- int extPos = lastAvceToken.lastIndexOf( '.' );
-
- if ( extPos > 0 )
- {
- String ext = lastAvceToken.substring( extPos + 1 );
- if ( type.equals( ext ) || "plugin".equals( type ) )
- {
- lastAvceToken = lastAvceToken.substring( 0, extPos );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else
- {
- throw new DiscovererException( "Path type does not match the extension" );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename does not have an extension" );
- }
- }
-
- // let's discover the version, and whatever's leftover will be either
- // a classifier, or part of the artifactId, depending on position.
- // Since version is at the end, we have to move in from the back.
- Collections.reverse( avceTokenList );
-
- // TODO: this is obscene - surely a better way?
- String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" +
- "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" +
- "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" +
- "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" +
- "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" +
- "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" +
- "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
-
- StringBuffer classifierBuffer = new StringBuffer();
- StringBuffer versionBuffer = new StringBuffer();
-
- boolean firstVersionTokenEncountered = false;
- boolean firstToken = true;
-
- int tokensIterated = 0;
- for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
- {
- String token = (String) it.next();
-
- boolean tokenIsVersionPart = token.matches( validVersionParts );
-
- StringBuffer bufferToUpdate;
-
- // NOTE: logic in code is reversed, since we're peeling off the back
- // Any token after the last versionPart will be in the classifier.
- // Any token UP TO first non-versionPart is part of the version.
- if ( !tokenIsVersionPart )
- {
- if ( firstVersionTokenEncountered )
- {
- //noinspection BreakStatement
- break;
- }
- else
- {
- bufferToUpdate = classifierBuffer;
- }
- }
- else
- {
- firstVersionTokenEncountered = true;
-
- bufferToUpdate = versionBuffer;
- }
-
- if ( firstToken )
- {
- firstToken = false;
- }
- else
- {
- bufferToUpdate.insert( 0, '-' );
- }
-
- bufferToUpdate.insert( 0, token );
-
- tokensIterated++;
- }
-
- // Now, restore the proper ordering so we can build the artifactId.
- Collections.reverse( avceTokenList );
-
- // if we didn't find a version, then punt. Use the last token
- // as the version, and set the classifier empty.
- if ( versionBuffer.length() < 1 )
- {
- if ( avceTokenList.size() > 1 )
- {
- int lastIdx = avceTokenList.size() - 1;
-
- versionBuffer.append( avceTokenList.get( lastIdx ) );
- avceTokenList.remove( lastIdx );
- }
-
- classifierBuffer.setLength( 0 );
- }
- else
- {
- // if everything is kosher, then pop off all the classifier and
- // version tokens, leaving the naked artifact id in the list.
- avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
- }
-
- StringBuffer artifactIdBuffer = new StringBuffer();
-
- firstToken = true;
- for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
- {
- String token = (String) it.next();
-
- if ( firstToken )
- {
- firstToken = false;
- }
- else
- {
- artifactIdBuffer.append( '-' );
- }
-
- artifactIdBuffer.append( token );
- }
-
- String artifactId = artifactIdBuffer.toString();
-
- if ( artifactId.length() > 0 )
- {
- int lastVersionCharIdx = versionBuffer.length() - 1;
- if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
- {
- versionBuffer.setLength( lastVersionCharIdx );
- }
-
- String version = versionBuffer.toString();
-
- if ( version.length() > 0 )
- {
- if ( classifierBuffer.length() > 0 )
- {
- result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
- classifierBuffer.toString() );
- }
- else
- {
- result = artifactFactory.createArtifact( groupId, artifactId, version,
- Artifact.SCOPE_RUNTIME, type );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename version is empty" );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename artifactId is empty" );
- }
- }
- else
- {
- throw new DiscovererException( "Path artifact type does not corresspond to an artifact type" );
- }
- }
- else
- {
- throw new DiscovererException( "Path does not match a legacy repository path for an artifact" );
- }
-
- return result;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-import java.util.List;
-
-/**
- * Interface for discovering metadata files.
- */
-public interface MetadataDiscoverer
- extends Discoverer
-{
- String ROLE = MetadataDiscoverer.class.getName();
-
- /**
- * Search for metadata files in the repository.
- *
- * @param repository The repository.
- * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
- * @param metadataFilter filter to use on the discovered metadata before returning
- * @return the list of artifacts found
- * @throws DiscovererException if there is a problem during the discovery process
- */
- List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter metadataFilter )
- throws DiscovererException;
-
- /**
- * Search for metadata files in the repository.
- *
- * @param repository The repository.
- * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
- * @return the list of artifacts found
- * @throws DiscovererException if there is a problem during the discovery process
- */
- List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
- throws DiscovererException;
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+/**
+ * PathUtil - simple utility methods for path manipulation.
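+ * <p>
+ * For example (hypothetical paths):
+ * <pre>
+ *   PathUtil.getRelative( "/repo", new File( "/repo/org/foo/foo-1.0.jar" ) );
+ *   // returns "org/foo/foo-1.0.jar"
+ * </pre>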
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class PathUtil
+{
+ public static String getRelative( String basedir, File file )
+ {
+ return getRelative( basedir, file.getAbsolutePath() );
+ }
+
+ public static String getRelative( String basedir, String child )
+ {
+ if ( child.startsWith( basedir ) )
+ {
+ // simple solution.
+ return child.substring( basedir.length() + 1 );
+ }
+
+ String absoluteBasedir = new File( basedir ).getAbsolutePath();
+ if ( child.startsWith( absoluteBasedir ) )
+ {
+ // resolved basedir solution.
+ return child.substring( absoluteBasedir.length() + 1 );
+ }
+
+ // File is not within basedir.
+ throw new IllegalStateException( "Unable to obtain relative path of file " + child
+ + ", it is not within basedir " + basedir + "." );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.SystemUtils;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.util.DirectoryWalkListener;
+import org.codehaus.plexus.util.SelectorUtils;
+
+import java.io.File;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * RepositoryScanner - this is an instance of a scan against a repository.
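+ * <p>
+ * A wiring sketch (illustrative only, assuming the plexus-utils
+ * {@link org.codehaus.plexus.util.DirectoryWalker}):
+ * <pre>
+ *   RepositoryScanner scanner = new RepositoryScanner( repository, consumers );
+ *   scanner.setLogger( logger );
+ *
+ *   DirectoryWalker walker = new DirectoryWalker();
+ *   walker.setBaseDir( new File( repository.getBasedir() ) );
+ *   walker.addDirectoryWalkListener( scanner );
+ *   walker.scan();
+ * </pre>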
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class RepositoryScanner
+ implements DirectoryWalkListener
+{
+ public static final String ROLE = RepositoryScanner.class.getName();
+
+ private List consumers;
+
+ private ArtifactRepository repository;
+
+ private Logger logger;
+
+ private boolean isCaseSensitive = true;
+
+ private DiscovererStatistics stats;
+
+ private boolean checkLastModified = true;
+
+ public RepositoryScanner( ArtifactRepository repository, List consumerList )
+ {
+ this.repository = repository;
+ this.consumers = consumerList;
+ stats = new DiscovererStatistics( repository );
+
+ Iterator it = this.consumers.iterator();
+ while ( it.hasNext() )
+ {
+ DiscovererConsumer consumer = (DiscovererConsumer) it.next();
+
+ if ( !consumer.init( this.repository ) )
+ {
+ throw new IllegalStateException( "Consumer [" + consumer.getName()
+ + "] is reporting that it is incompatible with the [" + repository.getId() + "] repository." );
+ }
+ }
+
+ if ( SystemUtils.IS_OS_WINDOWS )
+ {
+ isCaseSensitive = false;
+ }
+ }
+
+ public DiscovererStatistics getStatistics()
+ {
+ return stats;
+ }
+
+ public void directoryWalkFinished()
+ {
+ getLogger().info( "Walk Finished." );
+ stats.timestampFinished = System.currentTimeMillis();
+
+        if ( isCheckLastModified() )
+ {
+ // Only save if dealing with 'last modified' concept.
+
+ try
+ {
+ stats.save();
+ }
+ catch ( DiscovererException e )
+ {
+ getLogger().warn( "Unable to save Scan information.", e );
+ }
+ }
+ }
+
+ public void directoryWalkStarting( File basedir )
+ {
+ getLogger().info( "Walk Started." );
+ stats.reset();
+ stats.timestampStarted = System.currentTimeMillis();
+ }
+
+ public void directoryWalkStep( int percentage, File file )
+ {
+ getLogger().info( "Walk Step: " + percentage + ", " + file );
+
+ // Timestamp finished points to the last successful scan, not this current one.
+ if ( isCheckLastModified() && ( file.lastModified() <= stats.timestampFinished ) )
+ {
+            // Skip file as no change has occurred.
+ getLogger().debug( "Skipping, No Change: " + file.getAbsolutePath() );
+ stats.filesSkipped++;
+ return;
+ }
+
+ synchronized ( consumers )
+ {
+ stats.filesIncluded++;
+
+ String relativePath = PathUtil.getRelative( repository.getBasedir(), file );
+
+ Iterator itConsumers = this.consumers.iterator();
+ while ( itConsumers.hasNext() )
+ {
+ DiscovererConsumer consumer = (DiscovererConsumer) itConsumers.next();
+
+ if ( isConsumerOfFile( consumer, relativePath ) )
+ {
+ try
+ {
+ getLogger().info( "Sending to consumer: " + consumer.getName() );
+ stats.filesConsumed++;
+ consumer.processFile( file );
+ }
+ catch ( Exception e )
+ {
+ /* Intentionally Catch all exceptions.
+ * So that the discoverer processing can continue.
+ */
+ getLogger()
+ .error( "Unable to process file [" + file.getAbsolutePath() + "]: " + e.getMessage(), e );
+ }
+ }
+ else
+ {
+ getLogger().info( "Skipping consumer " + consumer.getName() + " for file " + relativePath );
+ }
+ }
+ }
+ }
+
+ private boolean isConsumerOfFile( DiscovererConsumer consumer, String relativePath )
+ {
+ Iterator it = consumer.getIncludePatterns().iterator();
+ while ( it.hasNext() )
+ {
+ String pattern = (String) it.next();
+ if ( SelectorUtils.matchPath( pattern, relativePath, isCaseSensitive ) )
+ {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ public boolean isCheckLastModified()
+ {
+ return checkLastModified;
+ }
+
+ public void setCheckLastModified( boolean checkLastModified )
+ {
+ this.checkLastModified = checkLastModified;
+ }
+
+ /**
+ * Debug method from DirectoryWalker.
+ */
+ public void debug( String message )
+ {
+ getLogger().debug( "Repository Scanner: " + message );
+ }
+
+ public Logger getLogger()
+ {
+ return logger;
+ }
+
+ public void setLogger( Logger logger )
+ {
+ this.logger = logger;
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+/**
+ * AbstractLayoutArtifactBuilder
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractLayoutArtifactBuilder
+ implements LayoutArtifactBuilder
+{
+ /**
+ * @plexus.requirement
+ */
+ protected ArtifactFactory artifactFactory;
+
+ /**
+ * Constructor used by plexus
+ */
+ public AbstractLayoutArtifactBuilder()
+ {
+
+ }
+
+ /**
+     * Constructor used when constructing the builder manually.
+ *
+ * @param artifactFactory the artifact factory to use.
+ */
+ public AbstractLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+ {
+ this.artifactFactory = artifactFactory;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+
+/**
+ * BuilderException - indicates a problem while building an object from a file.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class BuilderException
+ extends DiscovererException
+{
+
+ public BuilderException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+
+ public BuilderException( String message )
+ {
+ super( message );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * DefaultLayoutArtifactBuilder - artifact builder for default layout repositories.
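+ * <p>
+ * For illustration (hypothetical coordinates), a default-layout path such as
+ * <code>org/apache/maven/maven-model/2.0/maven-model-2.0.jar</code> is parsed as
+ * groupId <code>org.apache.maven</code>, artifactId <code>maven-model</code>,
+ * version <code>2.0</code>, type <code>jar</code>.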
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.builders.LayoutArtifactBuilder"
+ * role-hint="default"
+ */
+public class DefaultLayoutArtifactBuilder
+ extends AbstractLayoutArtifactBuilder
+ implements LayoutArtifactBuilder
+{
+ public DefaultLayoutArtifactBuilder()
+ {
+ super();
+ }
+
+ public DefaultLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+ {
+ super( artifactFactory );
+ }
+
+ public Artifact build( String pathToArtifact )
+ throws BuilderException, DiscovererException
+ {
+ List pathParts = new ArrayList();
+ StringTokenizer st = new StringTokenizer( pathToArtifact, "/\\" );
+ while ( st.hasMoreTokens() )
+ {
+ pathParts.add( st.nextToken() );
+ }
+
+ Collections.reverse( pathParts );
+
+ Artifact artifact;
+ if ( pathParts.size() >= 4 )
+ {
+ // maven 2.x path
+
+ // the actual artifact filename.
+ String filename = (String) pathParts.remove( 0 );
+
+ // the next one is the version.
+ String version = (String) pathParts.remove( 0 );
+
+ // the next one is the artifactId.
+ String artifactId = (String) pathParts.remove( 0 );
+
+ // the remaining are the groupId.
+ Collections.reverse( pathParts );
+ String groupId = StringUtils.join( pathParts.iterator(), "." );
+
+ String remainingFilename = filename;
+ if ( remainingFilename.startsWith( artifactId + "-" ) )
+ {
+ remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
+
+ String classifier = null;
+
+ // TODO: use artifact handler, share with legacy discoverer
+ String type;
+ if ( remainingFilename.endsWith( ".tar.gz" ) )
+ {
+ type = "distribution-tgz";
+ remainingFilename = remainingFilename
+ .substring( 0, remainingFilename.length() - ".tar.gz".length() );
+ }
+ else if ( remainingFilename.endsWith( ".zip" ) )
+ {
+ type = "distribution-zip";
+ remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
+ }
+ else if ( remainingFilename.endsWith( "-test-sources.jar" ) )
+ {
+ type = "java-source";
+ classifier = "test-sources";
+ remainingFilename = remainingFilename.substring( 0, remainingFilename.length()
+ - "-test-sources.jar".length() );
+ }
+ else if ( remainingFilename.endsWith( "-sources.jar" ) )
+ {
+ type = "java-source";
+ classifier = "sources";
+ remainingFilename = remainingFilename.substring( 0, remainingFilename.length()
+ - "-sources.jar".length() );
+ }
+ else
+ {
+ int index = remainingFilename.lastIndexOf( "." );
+ if ( index >= 0 )
+ {
+ type = remainingFilename.substring( index + 1 );
+ remainingFilename = remainingFilename.substring( 0, index );
+ }
+ else
+ {
+ throw new BuilderException( "Path filename does not have an extension." );
+ }
+ }
+
+ Artifact result;
+ if ( classifier == null )
+ {
+ result = artifactFactory
+ .createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
+ }
+ else
+ {
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifier );
+ }
+
+ if ( result.isSnapshot() )
+ {
+ // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
+ int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
+ if ( classifierIndex >= 0 )
+ {
+ classifier = remainingFilename.substring( classifierIndex + 1 );
+ remainingFilename = remainingFilename.substring( 0, classifierIndex );
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
+ type, classifier );
+ }
+ else
+ {
+ result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
+ Artifact.SCOPE_RUNTIME, type );
+ }
+
+ // poor encapsulation requires we do this to populate base version
+ if ( !result.isSnapshot() )
+ {
+ throw new BuilderException( "Failed to create a snapshot artifact: " + result );
+ }
+ else if ( !result.getBaseVersion().equals( version ) )
+ {
+ throw new BuilderException(
+ "Built snapshot artifact base version does not match path version: "
+ + result.getBaseVersion() + "; should have been version: "
+ + version );
+ }
+ else
+ {
+ artifact = result;
+ }
+ }
+ else if ( !remainingFilename.startsWith( version ) )
+ {
+ throw new BuilderException( "Built artifact version does not match path version" );
+ }
+ else if ( !remainingFilename.equals( version ) )
+ {
+ if ( remainingFilename.charAt( version.length() ) == '-' )
+ {
+ classifier = remainingFilename.substring( version.length() + 1 );
+ artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifier );
+ }
+ else
+ {
+ throw new BuilderException( "Path version does not corresspond to an artifact version" );
+ }
+ }
+ else
+ {
+ artifact = result;
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path filename does not correspond to an artifact." );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path is too short to build an artifact from." );
+ }
+
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+/**
+ * LayoutArtifactBuilder
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @todo this concept should really exist inside of the {@link ArtifactRepositoryLayout}
+ */
+public interface LayoutArtifactBuilder
+{
+ public Artifact build( String pathToArtifact ) throws BuilderException, DiscovererException;
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.StringTokenizer;
+
+/**
+ * LegacyLayoutArtifactBuilder
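+ * <p>
+ * For illustration (hypothetical coordinates), a legacy-layout path such as
+ * <code>commons-lang/jars/commons-lang-2.1.jar</code> is parsed as
+ * groupId <code>commons-lang</code>, artifactId <code>commons-lang</code>,
+ * version <code>2.1</code>, type <code>jar</code>.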
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.builders.LayoutArtifactBuilder"
+ * role-hint="legacy"
+ */
+public class LegacyLayoutArtifactBuilder
+ extends AbstractLayoutArtifactBuilder
+ implements LayoutArtifactBuilder
+{
+ public LegacyLayoutArtifactBuilder()
+ {
+ super();
+ }
+
+ public LegacyLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+ {
+ super( artifactFactory );
+ }
+
+ public Artifact build( String pathToArtifact )
+ throws BuilderException, DiscovererException
+ {
+ StringTokenizer tokens = new StringTokenizer( pathToArtifact, "/\\" );
+
+ Artifact result;
+
+ int numberOfTokens = tokens.countTokens();
+
+ if ( numberOfTokens == 3 )
+ {
+ String groupId = tokens.nextToken();
+
+ String type = tokens.nextToken();
+
+ if ( type.endsWith( "s" ) )
+ {
+ type = type.substring( 0, type.length() - 1 );
+
+ // contains artifactId, version, classifier, and extension.
+ String avceGlob = tokens.nextToken();
+
+ //noinspection CollectionDeclaredAsConcreteClass
+ LinkedList avceTokenList = new LinkedList();
+
+ StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
+ while ( avceTokenizer.hasMoreTokens() )
+ {
+ avceTokenList.addLast( avceTokenizer.nextToken() );
+ }
+
+ String lastAvceToken = (String) avceTokenList.removeLast();
+
+ // TODO: share with other discoverer, use artifact handlers instead
+ if ( lastAvceToken.endsWith( ".tar.gz" ) )
+ {
+ type = "distribution-tgz";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else if ( lastAvceToken.endsWith( "sources.jar" ) )
+ {
+ type = "java-source";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else if ( lastAvceToken.endsWith( "javadoc.jar" ) )
+ {
+ type = "javadoc.jar";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else if ( lastAvceToken.endsWith( ".zip" ) )
+ {
+ type = "distribution-zip";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else
+ {
+ int extPos = lastAvceToken.lastIndexOf( '.' );
+
+ if ( extPos > 0 )
+ {
+ String ext = lastAvceToken.substring( extPos + 1 );
+ if ( type.equals( ext ) || "plugin".equals( type ) )
+ {
+ lastAvceToken = lastAvceToken.substring( 0, extPos );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else
+ {
+ throw new BuilderException( "Path type does not match the extension" );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path filename does not have an extension" );
+ }
+ }
+
+ // let's discover the version, and whatever's leftover will be either
+ // a classifier, or part of the artifactId, depending on position.
+ // Since version is at the end, we have to move in from the back.
+ Collections.reverse( avceTokenList );
+
+ // TODO: this is obscene - surely a better way?
+ String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|"
+ + "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|"
+ + "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|"
+ + "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|"
+ + "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|"
+ + "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|"
+ + "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
+
+ StringBuffer classifierBuffer = new StringBuffer();
+ StringBuffer versionBuffer = new StringBuffer();
+
+ boolean firstVersionTokenEncountered = false;
+ boolean firstToken = true;
+
+ int tokensIterated = 0;
+ for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+ {
+ String token = (String) it.next();
+
+ boolean tokenIsVersionPart = token.matches( validVersionParts );
+
+ StringBuffer bufferToUpdate;
+
+ // NOTE: logic in code is reversed, since we're peeling off the back
+ // Any token after the last versionPart will be in the classifier.
+ // Any token UP TO first non-versionPart is part of the version.
+ if ( !tokenIsVersionPart )
+ {
+ if ( firstVersionTokenEncountered )
+ {
+ //noinspection BreakStatement
+ break;
+ }
+ else
+ {
+ bufferToUpdate = classifierBuffer;
+ }
+ }
+ else
+ {
+ firstVersionTokenEncountered = true;
+
+ bufferToUpdate = versionBuffer;
+ }
+
+ if ( firstToken )
+ {
+ firstToken = false;
+ }
+ else
+ {
+ bufferToUpdate.insert( 0, '-' );
+ }
+
+ bufferToUpdate.insert( 0, token );
+
+ tokensIterated++;
+ }
+
+ // Now, restore the proper ordering so we can build the artifactId.
+ Collections.reverse( avceTokenList );
+
+ // if we didn't find a version, then punt. Use the last token
+ // as the version, and set the classifier empty.
+ if ( versionBuffer.length() < 1 )
+ {
+ if ( avceTokenList.size() > 1 )
+ {
+ int lastIdx = avceTokenList.size() - 1;
+
+ versionBuffer.append( avceTokenList.get( lastIdx ) );
+ avceTokenList.remove( lastIdx );
+ }
+
+ classifierBuffer.setLength( 0 );
+ }
+ else
+ {
+ // if everything is kosher, then pop off all the classifier and
+ // version tokens, leaving the naked artifact id in the list.
+ avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
+ }
+
+ StringBuffer artifactIdBuffer = new StringBuffer();
+
+ firstToken = true;
+ for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+ {
+ String token = (String) it.next();
+
+ if ( firstToken )
+ {
+ firstToken = false;
+ }
+ else
+ {
+ artifactIdBuffer.append( '-' );
+ }
+
+ artifactIdBuffer.append( token );
+ }
+
+ String artifactId = artifactIdBuffer.toString();
+
+ if ( artifactId.length() > 0 )
+ {
+ int lastVersionCharIdx = versionBuffer.length() - 1;
+ if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
+ {
+ versionBuffer.setLength( lastVersionCharIdx );
+ }
+
+ String version = versionBuffer.toString();
+
+ if ( version.length() > 0 )
+ {
+ if ( classifierBuffer.length() > 0 )
+ {
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifierBuffer.toString() );
+ }
+ else
+ {
+ result = artifactFactory.createArtifact( groupId, artifactId, version,
+ Artifact.SCOPE_RUNTIME, type );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path filename version is empty" );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path filename artifactId is empty" );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path artifact type does not corresspond to an artifact type" );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path does not match a legacy repository path for an artifact" );
+ }
+
+ return result;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * AbstractDiscovererConsumer
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractDiscovererConsumer
+ extends AbstractLogEnabled
+ implements DiscovererConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ protected ArtifactFactory artifactFactory;
+
+ protected ArtifactRepository repository;
+
+ public List getExcludePatterns()
+ {
+ return Collections.EMPTY_LIST;
+ }
+
+ public boolean init( ArtifactRepository repository )
+ {
+ this.repository = repository;
+ return isEnabled();
+ }
+
+ protected boolean isEnabled()
+ {
+ return true;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.archiva.discoverer.builders.BuilderException;
+import org.apache.maven.archiva.discoverer.builders.DefaultLayoutArtifactBuilder;
+import org.apache.maven.archiva.discoverer.builders.LayoutArtifactBuilder;
+import org.apache.maven.archiva.discoverer.builders.LegacyLayoutArtifactBuilder;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * GenericArtifactConsumer - generic consumer for discovered artifact files.
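+ * <p>
+ * Concrete consumers implement the two abstract callbacks; a minimal sketch
+ * (hypothetical subclass name):
+ * <pre>
+ *   public class IndexArtifactConsumer
+ *       extends GenericArtifactConsumer
+ *   {
+ *       public void processArtifact( Artifact artifact, File file )
+ *       {
+ *           // index the discovered artifact.
+ *       }
+ *
+ *       public void processArtifactBuildFailure( File path, String message )
+ *       {
+ *           // record the path that could not be built into an artifact.
+ *       }
+ *   }
+ * </pre>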
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericArtifactConsumer
+ extends AbstractDiscovererConsumer
+ implements DiscovererConsumer
+{
+ private Map artifactBuilders = new HashMap();
+
+ private static final List includePatterns;
+
+ static
+ {
+ includePatterns = new ArrayList();
+ includePatterns.add( "**/*.pom" );
+ includePatterns.add( "**/*.jar" );
+ includePatterns.add( "**/*.war" );
+ includePatterns.add( "**/*.ear" );
+ includePatterns.add( "**/*.sar" );
+ includePatterns.add( "**/*.zip" );
+ includePatterns.add( "**/*.gz" );
+ includePatterns.add( "**/*.bz2" );
+ }
+
+ private String layoutId = "default";
+
+ public GenericArtifactConsumer()
+ {
+ }
+
+ public boolean init( ArtifactRepository repository )
+ {
+ this.artifactBuilders.clear();
+ this.artifactBuilders.put( "default", new DefaultLayoutArtifactBuilder( artifactFactory ) );
+ this.artifactBuilders.put( "legacy", new LegacyLayoutArtifactBuilder( artifactFactory ) );
+
+ if ( repository.getLayout() instanceof LegacyRepositoryLayout )
+ {
+ this.layoutId = "legacy";
+ }
+
+ return super.init( repository );
+ }
+
+ public abstract void processArtifact( Artifact artifact, File file );
+
+ public abstract void processArtifactBuildFailure( File path, String message );
+
+ public List getIncludePatterns()
+ {
+ return includePatterns;
+ }
+
+ public String getName()
+ {
+ return "Artifact Consumer";
+ }
+
+ public boolean isEnabled()
+ {
+ ArtifactRepositoryLayout layout = repository.getLayout();
+ return ( layout instanceof DefaultRepositoryLayout ) || ( layout instanceof LegacyRepositoryLayout );
+ }
+
+ public void processFile( File file )
+ throws DiscovererException
+ {
+ try
+ {
+ Artifact artifact = buildArtifact( repository.getBasedir(), file.getAbsolutePath() );
+
+ processArtifact( artifact, file );
+ }
+ catch ( BuilderException e )
+ {
+ processArtifactBuildFailure( file, e.getMessage() );
+ }
+ }
+
+ /**
+     * Build an artifact from the given path, delegating to the
+     * {@link LayoutArtifactBuilder} registered for the repository layout.
+ */
+ private Artifact buildArtifact( String repoBaseDir, String path )
+ throws BuilderException, DiscovererException
+ {
+ LayoutArtifactBuilder builder = (LayoutArtifactBuilder) artifactBuilders.get( layoutId );
+
+ String relativePath = PathUtil.getRelative( repoBaseDir, path );
+
+ Artifact artifact = builder.build( relativePath );
+ artifact.setRepository( repository );
+        artifact.setFile( new File( repository.getBasedir(), relativePath ) );
+
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * GenericModelConsumer - consumer for pom files.
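+ * <p>
+ * Concrete consumers implement {@link #processModel(Model, File)}; a minimal
+ * sketch (hypothetical subclass name):
+ * <pre>
+ *   public class ProjectModelConsumer
+ *       extends GenericModelConsumer
+ *   {
+ *       public void processModel( Model model, File file )
+ *       {
+ *           // handle the parsed POM here.
+ *       }
+ *   }
+ * </pre>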
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericModelConsumer
+ extends AbstractDiscovererConsumer
+ implements DiscovererConsumer
+{
+ public abstract void processModel( Model model, File file );
+
+ private static final List includePatterns;
+
+ static
+ {
+ includePatterns = new ArrayList();
+ includePatterns.add( "**/*.pom" );
+ }
+
+ public GenericModelConsumer()
+ {
+
+ }
+
+ public List getIncludePatterns()
+ {
+ return includePatterns;
+ }
+
+ public String getName()
+ {
+ return "MavenProject Consumer";
+ }
+
+ public boolean isEnabled()
+ {
+ return true;
+ }
+
+ public void processFile( File file )
+ throws DiscovererException
+ {
+ String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+ Model model = buildModel( repository.getBasedir(), relpath );
+ processModel( model, file );
+ }
+
+ private Model buildModel( String basedir, String modelpath )
+ throws DiscovererException
+ {
+ Model model;
+ File f = new File( basedir, modelpath );
+ Reader reader = null;
+ try
+ {
+ reader = new FileReader( f );
+ MavenXpp3Reader modelReader = new MavenXpp3Reader();
+
+ model = modelReader.read( reader );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
+ }
+ catch ( IOException e )
+ {
+ throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( reader );
+ }
+
+ return model;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * GenericRepositoryMetadataConsumer - Consume any maven-metadata.xml files as {@link RepositoryMetadata} objects.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericRepositoryMetadataConsumer
+ extends AbstractDiscovererConsumer
+ implements DiscovererConsumer
+{
+ public abstract void processRepositoryMetadata( RepositoryMetadata metadata, File file );
+
+ private static final List includePatterns;
+
+ static
+ {
+ includePatterns = new ArrayList();
+ includePatterns.add( "**/maven-metadata.xml" );
+ }
+
+ public GenericRepositoryMetadataConsumer()
+ {
+
+ }
+
+ public List getIncludePatterns()
+ {
+ return includePatterns;
+ }
+
+ public String getName()
+ {
+ return "RepositoryMetadata Consumer";
+ }
+
+ public boolean isEnabled()
+ {
+ // the RepositoryMetadata objects only exist in 'default' layout repositories.
+ ArtifactRepositoryLayout layout = repository.getLayout();
+ return ( layout instanceof DefaultRepositoryLayout );
+ }
+
+ public void processFile( File file )
+ throws DiscovererException
+ {
+ String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+ RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), relpath );
+ processRepositoryMetadata( metadata, file );
+ }
+
+ private RepositoryMetadata buildMetadata( String repo, String metadataPath )
+ throws DiscovererException
+ {
+ Metadata m;
+ File f = new File( repo, metadataPath );
+ Reader reader = null;
+ try
+ {
+ reader = new FileReader( f );
+ MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
+
+ m = metadataReader.read( reader );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
+ }
+ catch ( IOException e )
+ {
+ throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( reader );
+ }
+
+ RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
+
+ if ( repositoryMetadata == null )
+ {
+ throw new DiscovererException( "Unable to build a repository metadata from path" );
+ }
+
+ return repositoryMetadata;
+ }
+
+ /**
+ * Builds a RepositoryMetadata object from a Metadata object and its path.
+ *
+ * @param m Metadata
+ * @param metadataPath path
+ * @return RepositoryMetadata if the parameters represent one; null if not
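+     *         (for example, a hypothetical path <code>org/apache/maven-metadata.xml</code> whose
+     *         directories match the metadata's groupId yields a {@link GroupRepositoryMetadata})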
+ * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc)
+ */
+ private RepositoryMetadata buildMetadata( Metadata m, String metadataPath )
+ {
+ String metaGroupId = m.getGroupId();
+ String metaArtifactId = m.getArtifactId();
+ String metaVersion = m.getVersion();
+
+        // check if the groupId, artifactId and version are in the
+        // metadataPath by parsing the path, in reverse order.
+ List pathParts = new ArrayList();
+ StringTokenizer st = new StringTokenizer( metadataPath, "/\\" );
+ while ( st.hasMoreTokens() )
+ {
+ pathParts.add( st.nextToken() );
+ }
+
+ Collections.reverse( pathParts );
+ // remove the metadata file
+ pathParts.remove( 0 );
+ Iterator it = pathParts.iterator();
+ String tmpDir = (String) it.next();
+
+ Artifact artifact = null;
+ if ( StringUtils.isNotEmpty( metaVersion ) )
+ {
+ artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
+ }
+
+ // snapshotMetadata
+ RepositoryMetadata metadata = null;
+ if ( tmpDir != null && tmpDir.equals( metaVersion ) )
+ {
+ if ( artifact != null )
+ {
+ metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ }
+ }
+ else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
+ {
+ // artifactMetadata
+ if ( artifact != null )
+ {
+ metadata = new ArtifactRepositoryMetadata( artifact );
+ }
+ else
+ {
+ artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
+ metadata = new ArtifactRepositoryMetadata( artifact );
+ }
+ }
+ else
+ {
+ String groupDir = "";
+ int ctr = 0;
+ for ( it = pathParts.iterator(); it.hasNext(); )
+ {
+ String path = (String) it.next();
+ if ( ctr == 0 )
+ {
+ groupDir = path;
+ }
+ else
+ {
+ groupDir = path + "." + groupDir;
+ }
+ ctr++;
+ }
+
+ // groupMetadata
+ if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
+ {
+ metadata = new GroupRepositoryMetadata( metaGroupId );
+ }
+ }
+
+ return metadata;
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-/**
- * Filter that accepts all.
- */
-public class AcceptAllArtifactFilter
- implements ArtifactFilter
-{
- public boolean include( Artifact artifact )
- {
- return true;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Filter that accepts all.
- */
-public class AcceptAllMetadataFilter
- implements MetadataFilter
-{
- public boolean include( RepositoryMetadata metadata, long timestamp )
- {
- return true;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Ability to filter repository metadata lists.
- *
- * @todo should be in maven-artifact
- */
-public interface MetadataFilter
-{
- /**
- * Whether to include this metadata in the filtered list.
- *
- * @param metadata the metadata
- * @param timestamp the time to compare against - it will be included if it doesn't exist or is outdated
- * @return whether to include it
- */
- boolean include( RepositoryMetadata metadata, long timestamp );
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-/**
- * A filter to remove snapshot artifacts during discovery.
- */
-public class SnapshotArtifactFilter
- implements ArtifactFilter
-{
- public boolean include( Artifact artifact )
- {
- return !artifact.isSnapshot();
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-
-/**
- * @author Edwin Punzalan
- */
-public abstract class AbstractArtifactDiscovererTest
- extends PlexusTestCase
-{
- protected ArtifactDiscoverer discoverer;
-
- private ArtifactFactory factory;
-
- protected ArtifactRepository repository;
-
- protected abstract String getLayout();
-
- protected abstract File getRepositoryFile();
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, getLayout() );
-
- factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- repository = getRepository();
- }
-
- protected ArtifactRepository getRepository()
- throws Exception
- {
- File basedir = getRepositoryFile();
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout =
- (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, getLayout() );
-
- return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version )
- {
- Artifact artifact = factory.createArtifact( groupId, artifactId, version, null, "jar" );
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version, String type )
- {
- return factory.createArtifact( groupId, artifactId, version, null, type );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version, String type,
- String classifier )
- {
- return factory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+
+/**
+ * @author Edwin Punzalan
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ */
+public abstract class AbstractDiscovererTestCase
+ extends PlexusTestCase
+{
+ protected ArtifactRepository getLegacyRepository()
+ throws Exception
+ {
+ File repoBaseDir = new File( getBasedir(), "src/test/legacy-repository" );
+ ArtifactRepository repository = createRepository( repoBaseDir, "legacy" );
+ resetRepositoryState( repository );
+ return repository;
+ }
+
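+    /**
+     * Obtain the default-layout (Maven 2) test repository, with any stale
+     * discovery state removed first.
+     */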
+ protected ArtifactRepository getDefaultRepository()
+ throws Exception
+ {
+ File repoBaseDir = new File( getBasedir(), "src/test/repository" );
+ ArtifactRepository repository = createRepository( repoBaseDir, "default" );
+ resetRepositoryState( repository );
+ return repository;
+ }
+
+ private void resetRepositoryState( ArtifactRepository repository )
+ {
+ // Clean out any .stats file.
+ File repoBaseDir = new File( repository.getBasedir() );
+
+ File statFile = new File( repoBaseDir, DiscovererStatistics.STATS_FILENAME );
+ if ( statFile.exists() )
+ {
+ statFile.delete();
+ }
+
+ // TODO: Clean out any index.
+ // TODO: Clean out any report.
+ }
+
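+    /**
+     * Create a file-based test repository for the given basedir, using the
+     * named repository layout ("default" or "legacy").
+     */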
+ protected ArtifactRepository createRepository( File basedir, String layout )
+ throws Exception
+ {
+ ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+        ArtifactRepositoryLayout repoLayout =
+            (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, layout );
+
+ return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, repoLayout, null, null );
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the default artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:DefaultArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class DefaultArtifactDiscovererTest
- extends AbstractArtifactDiscovererTest
-{
- private static final List JAVAX_BLACKLIST = Collections.singletonList( "javax/**" );
-
- protected String getLayout()
- {
- return "default";
- }
-
- protected File getRepositoryFile()
- {
- return getTestFile( "src/test/repository" );
- }
-
- public void testDefaultExcludes()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- boolean b = path.indexOf( "CVS" ) >= 0;
- if ( b )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
- assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
- }
- }
-
- public void testStandardExcludes()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "KEYS".equals( path ) )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
- }
- }
-
- public void testBlacklistedExclude()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithBlacklist( JAVAX_BLACKLIST );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "javax/sql/jdbc/2.0/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
- dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- }
-
- public void testKickoutWithShortPath()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path is too short to build an artifact from",
- dPath.getComment() );
-
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongArtifactId()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar".equals(
- path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not correspond to an artifact",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not wrong jar",
- "wrong-artifactId-1.0-20050611.112233-1.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoType()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1/invalid-1".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not have an extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1'", "invalid-1".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0/invalid-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Built artifact version does not match path version",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-2.0.jar'", "invalid-2.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithLongerVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0/invalid-1.0b.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path version does not corresspond to an artifact version",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0b.jar'", "invalid-1.0b.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongSnapshotVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0.jar'", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithSnapshotBaseVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar".equals(
- path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Built snapshot artifact base version does not match path version: invalid:invalid:jar:1.0-SNAPSHOT:runtime; should have been version: 1.0-20050611.123456-1",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0-20050611-123456-1.jar'",
- "invalid-1.0-20050611.123456-1.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
- }
-
- public void testArtifactWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
- }
-
- public void testJavaSourcesInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains(
- createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
- }
-
- public void testTestSourcesInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains(
- createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources" ) ) );
- }
-
- public void testDistributionInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check zip included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
- assertTrue( "Check tar.gz included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
- }
-
- public void testSnapshotInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertTrue( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testSnapshotInclusionWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check snapshot included", artifacts.contains(
- createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ) ) );
- }
-
- public void testSnapshotExclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertFalse( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) );
- }
-
- public void testFileSet()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check file is set", artifact.getFile() );
- }
- }
-
- public void testRepositorySet()
- throws MalformedURLException, DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- String url = repository.getUrl();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check repository set", artifact.getRepository() );
- assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
- }
- }
-
- public void testStandalonePoms()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
-
- // cull down to actual artifacts (only standalone poms will have type = pom)
- Map keyedArtifacts = new HashMap();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- String key = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
- if ( !"pom".equals( a.getType() ) || !keyedArtifacts.containsKey( key ) )
- {
- keyedArtifacts.put( key, a );
- }
- }
-
- List models = new ArrayList();
-
- for ( Iterator i = keyedArtifacts.values().iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
-
- if ( "pom".equals( a.getType() ) )
- {
- models.add( a );
- }
- }
-
- assertEquals( 4, models.size() );
-
- // Define order we expect
- Collections.sort( models );
-
- Iterator itr = models.iterator();
- Artifact model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "B", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "B", model.getArtifactId() );
- assertEquals( "2.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "discovery", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.testgroup", model.getGroupId() );
- assertEquals( "discovery", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- }
-
- public void testShortPath()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid-1.0.jar" );
-
- fail( "Artifact should be null for short paths" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongArtifactId()
- throws ComponentLookupException
- {
-
- try
- {
- discoverer.buildArtifact( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
-
- fail( "Artifact should be null for wrong ArtifactId" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoType()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
- fail( "Artifact should be null for no type" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0/invalid-2.0.jar" );
-
- fail( "Artifact should be null for wrong version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testLongVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0/invalid-1.0b.jar" );
-
- fail( "Artifact should be null for long version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongSnapshotVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
-
- fail( "Artifact should be null for wrong snapshot version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testSnapshotBaseVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
-
- fail( "Artifact should be null for snapshot base version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testPathWithClassifier()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ), artifact );
- }
-
- public void testWithJavaSourceInclusion()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/testing/1.0/testing-1.0-sources.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ), artifact );
- }
-
- public void testDistributionArtifacts()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/testing/1.0/testing-1.0.tar.gz";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ), artifact );
-
- testPath = "org/apache/maven/testing/1.0/testing-1.0.zip";
-
- artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ), artifact );
- }
-
- public void testSnapshot()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ), artifact );
-
- testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar";
-
- artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ), artifact );
- }
-
- public void testNormal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax/sql/jdbc/2.0/jdbc-2.0.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
- }
-
- public void testSnapshotWithClassifier()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ),
- artifact );
- }
-
- private List discoverArtifactsWithSnapshots()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() );
- }
-
- private List discoverArtifactsWithBlacklist( List list )
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, list, new SnapshotArtifactFilter() );
- }
-
- private List discoverArtifacts()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * This class tests the DefaultMetadataDiscoverer class.
- */
-public class DefaultMetadataDiscovererTest
- extends PlexusTestCase
-{
- private MetadataDiscoverer discoverer;
-
- private static final String TEST_OPERATION = "test";
-
- private ArtifactRepository repository;
-
- private ArtifactFactory factory;
-
- /**
- *
- */
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- discoverer = (MetadataDiscoverer) lookup( MetadataDiscoverer.ROLE, "default" );
-
- factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- repository = getRepository();
-
- removeTimestampMetadata();
- }
-
- protected ArtifactRepository getRepository()
- throws Exception
- {
- File basedir = getTestFile( "src/test/repository" );
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
- }
-
- /**
- *
- */
- public void tearDown()
- throws Exception
- {
- super.tearDown();
- discoverer = null;
- }
-
- /**
- * Test if metadata file in wrong directory was added to the kickedOutPaths.
- */
- public void testKickoutWrongDirectory()
- throws DiscovererException
- {
- discoverer.discoverMetadata( repository, null );
- Iterator iter = discoverer.getKickedOutPathsIterator();
- boolean found = false;
- while ( iter.hasNext() && !found )
- {
- DiscovererPath dPath = (DiscovererPath) iter.next();
- String dir = dPath.getPath();
-
- String normalizedDir = dir.replace( '\\', '/' );
- if ( "javax/maven-metadata.xml".equals( normalizedDir ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Unable to build a repository metadata from path",
- dPath.getComment() );
- }
- }
- assertTrue( found );
- }
-
- /**
- * Test if blank metadata file was added to the kickedOutPaths.
- */
- public void testKickoutBlankMetadata()
- throws DiscovererException
- {
- discoverer.discoverMetadata( repository, null );
- Iterator iter = discoverer.getKickedOutPathsIterator();
- boolean found = false;
- while ( iter.hasNext() && !found )
- {
- DiscovererPath dPath = (DiscovererPath) iter.next();
- String dir = dPath.getPath();
-
- String normalizedDir = dir.replace( '\\', '/' );
- if ( "org/apache/maven/some-ejb/1.0/maven-metadata.xml".equals( normalizedDir ) )
- {
- found = true;
- assertTrue( "Check reason for kickout", dPath.getComment().matches(
- "Error reading metadata file '(.*)': input contained no data" ) );
- }
- }
- assertTrue( found );
- }
-
- private void removeTimestampMetadata()
- throws IOException
- {
- // remove the metadata that tracks time
- File file = new File( repository.getBasedir(), "maven-metadata.xml" );
- System.gc(); // for Windows
- file.delete();
- assertFalse( file.exists() );
- }
-
- public void testDiscoverMetadata()
- throws DiscovererException
- {
- List metadataPaths = discoverer.discoverMetadata( repository, null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- metadata =
- new SnapshotArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery", "1.0" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- metadata = new GroupRepositoryMetadata( "org.apache.maven" );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId )
- {
- return createArtifact( groupId, artifactId, "1.0" );
- }
-
- private Artifact createArtifact( String groupId, String artifactId, String version )
- {
- return factory.createArtifact( groupId, artifactId, version, null, "jar" );
- }
-
- private boolean containsMetadata( List metadataPaths, RepositoryMetadata metadata )
- {
- for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
- {
- RepositoryMetadata m = (RepositoryMetadata) i.next();
-
- if ( m.getGroupId().equals( metadata.getGroupId() ) )
- {
- if ( m.getArtifactId() == null && metadata.getArtifactId() == null )
- {
- return true;
- }
- else if ( m.getArtifactId() != null && m.getArtifactId().equals( metadata.getArtifactId() ) )
- {
- return true;
- }
- }
- }
- return false;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Test the legacy artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:LegacyArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class LegacyArtifactDiscovererTest
- extends AbstractArtifactDiscovererTest
-{
- private static final List JAVAX_SQL_BLACKLIST = Collections.singletonList( "javax.sql/**" );
-
- protected String getLayout()
- {
- return "legacy";
- }
-
- protected File getRepositoryFile()
- {
- return getTestFile( "src/test/legacy-repository" );
- }
-
- public void testDefaultExcludes()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( path.indexOf( "CVS" ) >= 0 )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
- assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
- }
- }
-
- public void testStandardExcludes()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "KEYS".equals( path ) )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
- }
- }
-
- public void testBlacklistedExclude()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithBlacklist();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "javax.sql/jars/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
- dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- }
-
- public void testKickoutWithShortPath()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Path does not match a legacy repository path for an artifact", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithLongPath()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/1.0/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Path does not match a legacy repository path for an artifact", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithInvalidType()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/foo/invalid-1.0.foo".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path artifact type does not corresspond to an artifact type",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.foo", "invalid-1.0.foo".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoExtension()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/no-extension".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not have an extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'no-extension'", "no-extension".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongExtension()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/invalid-1.0.rar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path type does not match the extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0.rar'", "invalid-1.0.rar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/invalid.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename version is empty", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid.jar'", "invalid.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
- }
-
- public void testTextualVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "UNKNOWN" ) ) );
- }
-
- public void testArtifactWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
- }
-
- public void testJavaSourcesInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains(
- createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
- }
-
- public void testDistributionInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check zip included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
- assertTrue( "Check tar.gz included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
- }
-
- public void testSnapshotInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertTrue( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testSnapshotExclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertFalse( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testFileSet()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check file is set", artifact.getFile() );
- }
- }
-
- public void testRepositorySet()
- throws MalformedURLException, DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- String url = repository.getUrl();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check repository set", artifact.getRepository() );
- assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
- }
- }
-
- public void testWrongArtifactPackaging()
- throws ComponentLookupException, DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
-
- fail( "Artifact should be null for wrong package extension" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoArtifactId()
- throws DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "groupId/jars/-1.0.jar" );
-
- fail( "Artifact should be null when artifactId is missing" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
-
- try
- {
- discoverer.buildArtifact( "groupId/jars/1.0.jar" );
-
- fail( "Artifact should be null when artifactId is missing" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoType()
- throws ComponentLookupException, DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
- fail( "Artifact should be null for no type" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testSnapshot()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT" ), artifact );
- }
-
- public void testFinal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606" ), artifact );
- }
-
- public void testNormal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax.sql/jars/jdbc-2.0.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
- }
-
- public void testJavadoc()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc" ), artifact );
- }
-
- public void testSources()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax.sql/java-sources/jdbc-2.0-sources.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources" ), artifact );
- }
-
- public void testPlugin()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "maven/plugins/maven-test-plugin-1.8.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "maven", "maven-test-plugin", "1.8", "plugin" ), artifact );
- }
-
-
- private List discoverArtifacts()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() );
- }
-
- private List discoverArtifactsWithBlacklist()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, JAVAX_SQL_BLACKLIST, new SnapshotArtifactFilter() );
- }
-
- private List discoverArtifactsWithSnapshots()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer.builders;
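+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */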
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.PlexusTestCase;
+
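+/**
+ * AbstractLayoutArtifactBuilderTestCase - shared assertions for the layout
+ * artifact builder tests.
+ *
+ * @version $Id$
+ */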
+public class AbstractLayoutArtifactBuilderTestCase
+    extends PlexusTestCase
+{
+
+    protected void assertArtifact( String groupId, String artifactId, String version, String type,
+                                   String classifier, Artifact artifact )
+ {
+ assertNotNull( "Artifact cannot be null.", artifact );
+
+ assertEquals( "Artifact groupId", groupId, artifact.getGroupId() );
+ assertEquals( "Artifact artifactId", artifactId, artifact.getArtifactId() );
+ assertEquals( "Artifact version", version, artifact.getVersion() );
+ assertEquals( "Artifact type", type, artifact.getType() );
+
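+        // The classifier is optional; only assert it when an expected value is supplied.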
+ if ( StringUtils.isNotBlank( classifier ) )
+ {
+ assertEquals( "Artifact classifier", classifier, artifact.getClassifier() );
+ }
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+
+/**
+ * DefaultLayoutArtifactBuilderTest
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DefaultLayoutArtifactBuilderTest
+ extends AbstractLayoutArtifactBuilderTestCase
+{
+ LayoutArtifactBuilder builder;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
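+        // Look up the builder component for the default (Maven 2) repository layout.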
+ builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "default" );
+ assertNotNull( builder );
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ if ( builder != null )
+ {
+ release( builder );
+ }
+ super.tearDown();
+ }
+
+ public void testPathDistributionArtifacts()
+ throws BuilderException, DiscovererException
+ {
+ assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, builder
+ .build( "org/apache/maven/testing/1.0/testing-1.0.tar.gz" ) );
+
+ assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, builder
+ .build( "org/apache/maven/testing/1.0/testing-1.0.zip" ) );
+ }
+
+ public void testPathNormal()
+ throws BuilderException, DiscovererException
+ {
+ assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder
+ .build( "/org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) );
+
+ assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder
+ .build( "org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) );
+
+ assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, builder.build( "javax/sql/jdbc/2.0/jdbc-2.0.jar" ) );
+    }
+
+ public void testPathSnapshots()
+ throws BuilderException, DiscovererException
+ {
+ assertArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT", "jar", null, builder
+ .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar" ) );
+
+ assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, builder
+ .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar" ) );
+ }
+
+ public void testPathSnapshotWithClassifier()
+ throws BuilderException, DiscovererException
+ {
+ assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", builder
+ .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar" ) );
+ }
+
+ public void testPathWithClassifier()
+ throws BuilderException, DiscovererException
+ {
+ assertArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", builder
+ .build( "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar" ) );
+ }
+
+ public void testPathWithJavaSourceInclusion()
+ throws BuilderException, DiscovererException
+ {
+ assertArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", builder
+ .build( "org/apache/maven/testing/1.0/testing-1.0-sources.jar" ) );
+ }
+
+ public void testProblemMissingType()
+ throws DiscovererException
+ {
+ try
+ {
+ builder.build( "invalid/invalid/1/invalid-1" );
+ fail( "Should have detected missing type." );
+ }
+ catch ( BuilderException e )
+ {
+ /* expected path */
+ assertEquals( "Path filename does not have an extension.", e.getMessage() );
+ }
+ }
+
+ public void testProblemNonSnapshotInSnapshotDir()
+ throws DiscovererException
+ {
+ try
+ {
+ builder.build( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
+ fail( "Non Snapshot artifact inside of an Snapshot dir is invalid." );
+ }
+ catch ( BuilderException e )
+ {
+ /* expected path */
+ assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", e.getMessage() );
+ }
+ }
+
+ public void testProblemPathTooShort()
+ throws DiscovererException
+ {
+ try
+ {
+ builder.build( "invalid/invalid-1.0.jar" );
+ fail( "Should have detected that path is too short." );
+ }
+ catch ( BuilderException e )
+ {
+ /* expected path */
+ assertEquals( "Path is too short to build an artifact from.", e.getMessage() );
+ }
+ }
+
+ public void testProblemTimestampSnapshotNotInSnapshotDir()
+ throws DiscovererException
+ {
+ try
+ {
+ builder.build( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
+ fail( "Timestamped Snapshot artifact not inside of an Snapshot dir is invalid." );
+ }
+ catch ( BuilderException e )
+ {
+ /* expected path */
+ // TODO: Is this really the right thing to do for this kind of artifact??
+ assertEquals( "Built snapshot artifact base version does not match path version: 1.0-SNAPSHOT; "
+ + "should have been version: 1.0-20050611.123456-1", e.getMessage() );
+ }
+ }
+
+ public void testProblemVersionPathMismatch()
+ throws DiscovererException
+ {
+ try
+ {
+ builder.build( "invalid/invalid/1.0/invalid-2.0.jar" );
+ fail( "Should have detected version mismatch between path and artifact." );
+ }
+ catch ( BuilderException e )
+ {
+ /* expected path */
+ assertEquals( "Built artifact version does not match path version", e.getMessage() );
+ }
+ }
+
+ public void testProblemVersionPathMismatchAlt()
+ throws DiscovererException
+ {
+ try
+ {
+ builder.build( "invalid/invalid/1.0/invalid-1.0b.jar" );
+ fail( "Should have version mismatch between directory and artifact." );
+ }
+ catch ( BuilderException e )
+ {
+ /* expected path */
+ assertEquals( "Path version does not corresspond to an artifact version", e.getMessage() );
+ }
+ }
+
+ public void testProblemWrongArtifactId()
+ throws DiscovererException
+ {
+ try
+ {
+ builder.build( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
+ fail( "Should have detected wrong artifact Id." );
+ }
+ catch ( BuilderException e )
+ {
+ /* expected path */
+ assertEquals( "Path filename does not correspond to an artifact.", e.getMessage() );
+ }
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.builders;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+
+/**
+ * LegacyLayoutArtifactBuilderTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class LegacyLayoutArtifactBuilderTest
+ extends AbstractLayoutArtifactBuilderTestCase
+{
+ LayoutArtifactBuilder builder;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
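+        // Look up the builder component for the legacy (Maven 1) repository layout.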
+ builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "legacy" );
+ assertNotNull( builder );
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ if ( builder != null )
+ {
+ release( builder );
+ }
+ super.tearDown();
+ }
+
+ public void testPathNormal()
+ throws BuilderException, DiscovererException
+ {
+ Artifact artifact = builder.build( "javax.sql/jars/jdbc-2.0.jar" );
+
+ assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifact );
+ }
+
+ public void testPathFinal()
+ throws BuilderException, DiscovererException
+ {
+ Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar" );
+
+ assertArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606", "jar", null, artifact );
+ }
+
+ public void testPathSnapshot()
+ throws BuilderException, DiscovererException
+ {
+ Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar" );
+
+ assertArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT", "jar", null, artifact );
+ }
+
+ public void testPathJavadoc()
+ throws BuilderException, DiscovererException
+ {
+ Artifact artifact = builder.build( "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar" );
+
+ assertArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc", artifact );
+ }
+
+ public void testPathSources()
+ throws BuilderException, DiscovererException
+ {
+ Artifact artifact = builder.build( "javax.sql/java-sources/jdbc-2.0-sources.jar" );
+
+ assertArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources", artifact );
+ }
+
+ public void testPathPlugin()
+ throws BuilderException, DiscovererException
+ {
+ Artifact artifact = builder.build( "maven/plugins/maven-test-plugin-1.8.jar" );
+
+ assertArtifact( "maven", "maven-test-plugin", "1.8", "plugin", null, artifact );
+ }
+
+ public void testProblemNoType()
+ {
+ try
+ {
+ builder.build( "invalid/invalid/1/invalid-1" );
+
+ fail( "Should have detected no type." );
+ }
+ catch ( DiscovererException e )
+ {
+ /* expected path */
+ assertEquals( "Path does not match a legacy repository path for an artifact", e.getMessage() );
+ }
+ }
+
+ public void testProblemWrongArtifactPackaging()
+ throws ComponentLookupException, DiscovererException
+ {
+ try
+ {
+ builder.build( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
+
+ fail( "Should have detected wrong package extension." );
+ }
+ catch ( DiscovererException e )
+ {
+ /* expected path */
+ assertEquals( "Path type does not match the extension", e.getMessage() );
+ }
+ }
+
+ public void testProblemNoArtifactId()
+ throws DiscovererException
+ {
+ try
+ {
+ builder.build( "groupId/jars/-1.0.jar" );
+
+ fail( "Should have detected artifactId is missing" );
+ }
+ catch ( DiscovererException e )
+ {
+ /* expected path */
+ assertEquals( "Path filename artifactId is empty", e.getMessage() );
+ }
+
+ try
+ {
+ builder.build( "groupId/jars/1.0.jar" );
+
+ fail( "Should have detected artifactId is missing" );
+ }
+ catch ( DiscovererException e )
+ {
+ /* expected path */
+ assertEquals( "Path filename artifactId is empty", e.getMessage() );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.discoverer.AbstractDiscovererTestCase;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+/**
+ * AbstractConsumerTestCase
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractConsumerTestCase
+ extends AbstractDiscovererTestCase
+{
+ protected ArtifactFactory artifactFactory;
+
+ protected Discoverer discoverer;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+ discoverer = (Discoverer) lookup( Discoverer.class.getName(), "default" );
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ if ( discoverer != null )
+ {
+ release( discoverer );
+ }
+
+ if ( artifactFactory != null )
+ {
+ release( artifactFactory );
+ }
+ super.tearDown();
+ }
+
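+ /**
+ * Builds an artifact for assertions: uses the classifier-aware factory method when a
+ * classifier is present, otherwise creates a plain artifact with the default "runtime" scope.
+ */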
+ protected Artifact createArtifact( String groupId, String artifactId, String version, String type, String classifier )
+ {
+ if ( StringUtils.isNotBlank( classifier ) )
+ {
+ return artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
+ }
+ else
+ {
+ return artifactFactory.createArtifact( groupId, artifactId, version, "runtime", type );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.util.DirectoryScanner;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * GenericArtifactConsumerTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class GenericArtifactConsumerTest
+ extends AbstractConsumerTestCase
+{
+ private MockArtifactConsumer getMockArtifactConsumer() throws Exception
+ {
+ return (MockArtifactConsumer) lookup( DiscovererConsumer.ROLE, "mock-artifact" );
+ }
+
+ public void testScanLegacy()
+ throws Exception
+ {
+ ArtifactRepository repository = getLegacyRepository();
+ List consumers = new ArrayList();
+
+ MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+
+ consumers.add( mockConsumer );
+
+ DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+ assertNotNull( stats );
+
+ assertNotNull( consumers );
+
+ Iterator it = mockConsumer.getFailureMap().entrySet().iterator();
+ while ( it.hasNext() )
+ {
+ Map.Entry entry = (Entry) it.next();
+ String path = (String) entry.getKey();
+ String msg = (String) entry.getValue();
+ System.out.println( "Failure: " + path + " -> " + msg );
+ }
+
+ assertEquals( 3, mockConsumer.getFailureMap().size() );
+
+ assertEquals( "Path does not match a legacy repository path for an artifact", mockConsumer.getFailureMap()
+ .get( "invalid/invalid-1.0.jar" ) );
+ assertEquals( "Path filename version is empty", mockConsumer.getFailureMap().get( "invalid/jars/invalid.jar" ) );
+ assertEquals( "Path does not match a legacy repository path for an artifact", mockConsumer.getFailureMap()
+ .get( "invalid/jars/1.0/invalid-1.0.jar" ) );
+
+ assertEquals( 10, mockConsumer.getArtifactMap().size() );
+ }
+
+ public void testScanDefault()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+ List consumers = new ArrayList();
+
+ MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+
+ consumers.add( mockConsumer );
+
+ DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+ // Test Statistics
+
+ assertNotNull( stats );
+
+ assertEquals( 31, stats.getFilesConsumed() );
+ assertEquals( 0, stats.getFilesSkipped() );
+ assertEquals( 31, stats.getFilesIncluded() );
+ assertTrue( stats.getElapsedMilliseconds() > 0 );
+ assertTrue( stats.getTimestampFinished() >= stats.getTimestampStarted() );
+ assertTrue( stats.getTimestampStarted() > 0 );
+
+ // Test gathered information from Mock consumer.
+
+ Iterator it;
+
+ assertNotNull( consumers );
+
+ it = mockConsumer.getFailureMap().entrySet().iterator();
+ while ( it.hasNext() )
+ {
+ Map.Entry entry = (Entry) it.next();
+ String path = (String) entry.getKey();
+ String msg = (String) entry.getValue();
+ System.out.println( "Failure: " + path + " -> " + msg );
+ }
+
+ assertEquals( 6, mockConsumer.getFailureMap().size() );
+
+ assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", mockConsumer
+ .getFailureMap().get( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" ) );
+ assertEquals( "Path is too short to build an artifact from.", mockConsumer.getFailureMap()
+ .get( "invalid/invalid-1.0.jar" ) );
+ assertEquals( "Built artifact version does not match path version", mockConsumer.getFailureMap()
+ .get( "invalid/invalid/1.0/invalid-2.0.jar" ) );
+
+ assertEquals( 25, mockConsumer.getArtifactMap().size() );
+
+ // Test for known include artifacts
+
+ Collection artifacts = mockConsumer.getArtifactMap().values();
+ assertHasArtifact( "org.apache.maven", "testing", "1.0", "jar", null, artifacts );
+ assertHasArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", artifacts );
+ assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", artifacts );
+ assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources", artifacts );
+ assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, artifacts );
+ assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, artifacts );
+ assertHasArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifacts );
+ assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, artifacts );
+ assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", artifacts );
+
+ // Test for known excluded files and dirs to validate exclusions.
+
+ it = mockConsumer.getArtifactMap().values().iterator();
+ while ( it.hasNext() )
+ {
+ Artifact a = (Artifact) it.next();
+ assertTrue( "Artifact " + a + " should have it's .getFile() set.", a.getFile() != null );
+ assertTrue( "Artifact " + a + " should have it's .getRepository() set.", a.getRepository() != null );
+ assertTrue( "Artifact " + a + " should have non-null repository url.", a.getRepository().getUrl() != null );
+ assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
+ assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
+ }
+ }
+
+ private void assertHasArtifact( String groupId, String artifactId, String version, String type, String classifier,
+ Collection collection )
+ {
+ Artifact artifact = createArtifact( groupId, artifactId, version, type, classifier );
+ assertTrue( "Contains " + artifact, collection.contains( artifact ) );
+ }
+
+ /* This relies on File.setLastModified(long), which does not work reliably on all platforms;
+ * notably Linux and various early flavors of OS X.
+ * - Joakim
+ *
+ * TODO: Research alternative way to test this.
+ */
+ public void disabledTestScanDefaultUpdatesOnly()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+
+ // Set all files in repository to August 22 1972 (old date)
+ DiscovererStatistics stats;
+ makeRepositoryOld( repository );
+ makeFileNew( repository, "org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom" );
+ makeFileNew( repository, "org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar" );
+
+ // Now do the normal thing.
+
+ List consumers = new ArrayList();
+
+ MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+
+ consumers.add( mockConsumer );
+
+ stats = discoverer.scanRepository( repository, consumers, true );
+
+ // Test Statistics
+
+ assertNotNull( stats );
+
+ assertEquals( 2, stats.getFilesConsumed() );
+ assertEquals( 23, stats.getFilesSkipped() );
+ assertEquals( 2, stats.getFilesIncluded() );
+ assertTrue( stats.getElapsedMilliseconds() > 0 );
+ assertTrue( stats.getTimestampFinished() >= stats.getTimestampStarted() );
+ assertTrue( stats.getTimestampStarted() > 0 );
+
+ // Test gathered information from Mock consumer.
+
+ Iterator it;
+
+ assertNotNull( consumers );
+
+ it = mockConsumer.getFailureMap().entrySet().iterator();
+ while ( it.hasNext() )
+ {
+ Map.Entry entry = (Entry) it.next();
+ String path = (String) entry.getKey();
+ String msg = (String) entry.getValue();
+ System.out.println( "Failure: " + path + " -> " + msg );
+ }
+
+ assertEquals( 6, mockConsumer.getFailureMap().size() );
+
+ assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", mockConsumer
+ .getFailureMap().get( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" ) );
+ assertEquals( "Path is too short to build an artifact from.", mockConsumer.getFailureMap()
+ .get( "invalid/invalid-1.0.jar" ) );
+ assertEquals( "Built artifact version does not match path version", mockConsumer.getFailureMap()
+ .get( "invalid/invalid/1.0/invalid-2.0.jar" ) );
+
+ assertEquals( 25, mockConsumer.getArtifactMap().size() );
+
+ // Test for known excluded files and dirs to validate exclusions.
+
+ it = mockConsumer.getArtifactMap().values().iterator();
+ while ( it.hasNext() )
+ {
+ Artifact a = (Artifact) it.next();
+ assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
+ assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
+ }
+ }
+
+ private void makeFileNew( ArtifactRepository repository, String path )
+ {
+ File file = new File( repository.getBasedir(), path );
+ file.setLastModified( System.currentTimeMillis() );
+ }
+
+ private void makeRepositoryOld( ArtifactRepository repository )
+ throws DiscovererException
+ {
+ Calendar cal = Calendar.getInstance();
+ cal.clear();
+ cal.set( 1972, Calendar.AUGUST, 22, 1, 1, 1 );
+ long oldTime = cal.getTimeInMillis();
+
+ DiscovererStatistics stats = new DiscovererStatistics( repository );
+ stats.setTimestampFinished( oldTime + 5000 );
+ stats.save();
+
+ DirectoryScanner scanner = new DirectoryScanner();
+ scanner.setBasedir( repository.getBasedir() );
+ scanner.addDefaultExcludes();
+ scanner.setIncludes( new String[] { "**/*" } );
+ scanner.scan();
+ String[] files = scanner.getIncludedFiles();
+ for ( int i = 0; i < files.length; i++ )
+ {
+ // getIncludedFiles() returns paths relative to the basedir, so resolve against it.
+ File file = new File( repository.getBasedir(), files[i] );
+
+ if ( !file.setLastModified( oldTime ) )
+ {
+ fail( "Your platform apparently does not support the File.setLastModified(long) method." );
+ }
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * GenericModelConsumerTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class GenericModelConsumerTest
+ extends AbstractConsumerTestCase
+{
+ private MockModelConsumer getMockModelConsumer() throws Exception
+ {
+ return (MockModelConsumer) lookup( DiscovererConsumer.ROLE, "mock-model" );
+ }
+
+ public void testScanLegacy()
+ throws Exception
+ {
+ ArtifactRepository repository = getLegacyRepository();
+ List consumers = new ArrayList();
+
+ MockModelConsumer mockConsumer = getMockModelConsumer();
+
+ consumers.add( mockConsumer );
+
+ DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+ assertNotNull( stats );
+
+ assertNotNull( consumers );
+
+ Iterator it = mockConsumer.getModelMap().entrySet().iterator();
+ while ( it.hasNext() )
+ {
+ Map.Entry entry = (Entry) it.next();
+ String path = (String) entry.getKey();
+ Model model = (Model) entry.getValue();
+ System.out.println( "Model: " + path + " -> " + model );
+ }
+
+ // TODO: Add some poms to legacy repository!
+ assertEquals( 0, mockConsumer.getModelMap().size() );
+ }
+
+ public void testScanDefault()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+ List consumers = new ArrayList();
+
+ MockModelConsumer mockConsumer = getMockModelConsumer();
+
+ consumers.add( mockConsumer );
+
+ DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+ // Test Statistics
+
+ assertNotNull( stats );
+
+ assertEquals( 10, stats.getFilesConsumed() );
+ assertEquals( 0, stats.getFilesSkipped() );
+ assertEquals( 10, stats.getFilesIncluded() );
+ assertTrue( stats.getElapsedMilliseconds() > 0 );
+ assertTrue( stats.getTimestampFinished() >= stats.getTimestampStarted() );
+ assertTrue( stats.getTimestampStarted() > 0 );
+
+ // Test gathered information from Mock consumer.
+
+ Iterator it;
+
+ it = mockConsumer.getModelMap().entrySet().iterator();
+ while ( it.hasNext() )
+ {
+ Map.Entry entry = (Entry) it.next();
+ String path = (String) entry.getKey();
+ Model model = (Model) entry.getValue();
+ System.out.println( "Model: " + path + " -> " + model );
+ }
+
+ assertEquals( 10, mockConsumer.getModelMap().size() );
+
+ // Test for known include metadata
+
+ // Test for known excluded files and dirs to validate exclusions.
+
+ it = mockConsumer.getModelMap().keySet().iterator();
+ while ( it.hasNext() )
+ {
+ String path = (String) it.next();
+ assertFalse( "Check not CVS", path.indexOf( "CVS" ) >= 0 );
+ assertFalse( "Check not .svn", path.indexOf( ".svn" ) >= 0 );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.DiscovererConsumer;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * GenericRepositoryMetadataConsumerTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class GenericRepositoryMetadataConsumerTest
+ extends AbstractConsumerTestCase
+{
+ private MockRepositoryMetadataConsumer getMockRepositoryMetadataConsumer() throws Exception
+ {
+ return (MockRepositoryMetadataConsumer) lookup( DiscovererConsumer.ROLE, "mock-metadata" );
+ }
+
+ public void testScanLegacy()
+ throws Exception
+ {
+ ArtifactRepository repository = getLegacyRepository();
+ List consumers = new ArrayList();
+
+ MockRepositoryMetadataConsumer mockConsumer = getMockRepositoryMetadataConsumer();
+
+ consumers.add( mockConsumer );
+
+ try
+ {
+ discoverer.scanRepository( repository, consumers, true );
+ fail( "Should not have worked on a legacy repository." );
+ }
+ catch ( IllegalStateException e )
+ {
+ /* expected path */
+ }
+ }
+
+ public void testScanDefault()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+ List consumers = new ArrayList();
+
+ MockRepositoryMetadataConsumer mockConsumer = getMockRepositoryMetadataConsumer();
+
+ consumers.add( mockConsumer );
+
+ DiscovererStatistics stats = discoverer.scanRepository( repository, consumers, true );
+
+ // Test Statistics
+
+ assertNotNull( stats );
+
+ assertEquals( 7, stats.getFilesConsumed() );
+ assertEquals( 0, stats.getFilesSkipped() );
+ assertEquals( 7, stats.getFilesIncluded() );
+ assertTrue( stats.getElapsedMilliseconds() > 0 );
+ assertTrue( stats.getTimestampFinished() >= stats.getTimestampStarted() );
+ assertTrue( stats.getTimestampStarted() > 0 );
+
+ // Test gathered information from Mock consumer.
+
+ Iterator it;
+
+ it = mockConsumer.getRepositoryMetadataMap().entrySet().iterator();
+ while ( it.hasNext() )
+ {
+ Map.Entry entry = (Entry) it.next();
+ String path = (String) entry.getKey();
+ RepositoryMetadata repometa = (RepositoryMetadata) entry.getValue();
+ System.out.println( "Metadata: " + path + " -> " + repometa );
+ }
+
+ assertEquals( 5, mockConsumer.getRepositoryMetadataMap().size() );
+
+ // Test for known include metadata
+
+ // Test for known excluded files and dirs to validate exclusions.
+
+ it = mockConsumer.getRepositoryMetadataMap().keySet().iterator();
+ while ( it.hasNext() )
+ {
+ String path = (String) it.next();
+ assertFalse( "Check not CVS", path.indexOf( "CVS" ) >= 0 );
+ assertFalse( "Check not .svn", path.indexOf( ".svn" ) >= 0 );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.artifact.Artifact;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockArtifactConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ * role-hint="mock-artifact"
+ * instantiation-strategy="per-lookup"
+ */
+public class MockArtifactConsumer
+ extends GenericArtifactConsumer
+{
+ private Map artifactMap = new HashMap();
+
+ private Map failureMap = new HashMap();
+
+ public void processArtifact( Artifact artifact, File file )
+ {
+ String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+ artifactMap.put( relpath, artifact );
+ }
+
+ public void processArtifactBuildFailure( File path, String message )
+ {
+ String relpath = PathUtil.getRelative( repository.getBasedir(), path );
+ failureMap.put( relpath, message );
+ }
+
+ public Map getArtifactMap()
+ {
+ return artifactMap;
+ }
+
+ public Map getFailureMap()
+ {
+ return failureMap;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.model.Model;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockModelConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ * role-hint="mock-model"
+ * instantiation-strategy="per-lookup"
+ */
+public class MockModelConsumer
+ extends GenericModelConsumer
+{
+ private Map modelMap = new HashMap();
+
+ public void processModel( Model model, File file )
+ {
+ String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+ modelMap.put( relpath, model );
+ }
+
+ public Map getModelMap()
+ {
+ return modelMap;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva.discoverer.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.discoverer.PathUtil;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockRepositoryMetadataConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.discoverer.DiscovererConsumer"
+ * role-hint="mock-metadata"
+ * instantiation-strategy="per-lookup"
+ */
+public class MockRepositoryMetadataConsumer
+ extends GenericRepositoryMetadataConsumer
+{
+ private Map repositoryMetadataMap = new HashMap();
+
+ public void processRepositoryMetadata( RepositoryMetadata metadata, File file )
+ {
+ String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+ repositoryMetadataMap.put( relpath, metadata );
+ }
+
+ public Map getRepositoryMetadataMap()
+ {
+ return repositoryMetadataMap;
+ }
+}
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+ <role-hint>mock-artifact</role-hint>
+ <implementation>org.apache.maven.archiva.discoverer.consumers.MockArtifactConsumer</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+ <role-hint>mock-model</role-hint>
+ <implementation>org.apache.maven.archiva.discoverer.consumers.MockModelConsumer</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+ <role-hint>mock-metadata</role-hint>
+ <implementation>org.apache.maven.archiva.discoverer.consumers.MockRepositoryMetadataConsumer</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
import org.apache.maven.archiva.indexer.query.Query;
import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
import java.util.Collection;
import java.util.List;
Collection getAllRecordKeys()
throws RepositoryIndexException;
+ /**
+ * Indexes the specified artifact. If the artifact is already in the repository, it is updated.
+ * This method should use less memory than indexRecords as the record can be created and disposed of on the fly.
+ *
+ * @param artifact the artifact to index
+ * @param factory the factory used to convert the artifact to an index record
+ * @throws RepositoryIndexException if there is a problem indexing the artifact
+ */
+ void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+ throws RepositoryIndexException;
+
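+ /* A minimal usage sketch (assuming "index" is an implementation of this interface
+ * and "recordFactory" was looked up from the container; both names are illustrative):
+ *
+ * index.indexArtifact( artifact, recordFactory );
+ *
+ * In contrast to indexRecords, only one record is held in memory at a time.
+ */
+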
/**
* Indexes the artifacts found within the specified list. If the artifacts are already in the
* repository they are updated. This method should use less memory than indexRecords as the records can be
lastUpdatedTime = System.currentTimeMillis();
}
}
+
+ public void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+ throws RepositoryIndexException
+ {
+ IndexModifier indexModifier = null;
+ try
+ {
+ indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ if ( record != null )
+ {
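+ // Delete any existing document with this primary key first, so re-indexing
+ // an artifact replaces its entry instead of creating a duplicate.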
+ Term term = new Term( FLD_PK, record.getPrimaryKey() );
+
+ indexModifier.deleteDocuments( term );
+
+ Document document = converter.convert( record );
+ document.add( new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+
+ indexModifier.addDocument( document );
+ }
+ indexModifier.optimize();
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( indexModifier );
+ lastUpdatedTime = System.currentTimeMillis();
+ }
+ }
public List getAllGroupIds()
throws RepositoryIndexException
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-jdo2</artifactId>
+ <version>1.0-alpha-8</version>
+ <exclusions>
+ <exclusion>
+ <groupId>xerces</groupId>
+ <artifactId>xercesImpl</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>xerces</groupId>
+ <artifactId>xmlParserAPIs</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>jpox</groupId>
+ <artifactId>jpox</artifactId>
+ <version>1.1.6</version>
+ <scope>compile</scope>
+ <exclusions>
+ <!-- targeting JDK 1.4, we don't need this -->
+ <exclusion>
+ <groupId>javax.sql</groupId>
+ <artifactId>jdbc-stdext</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <!-- TEST DEPS -->
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.7.3.3</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
<plugins>
<groupId>org.codehaus.modello</groupId>
<artifactId>modello-maven-plugin</artifactId>
<version>1.0-alpha-14-SNAPSHOT</version>
+ <configuration>
+ <version>1.0.0</version>
+ <packageWithVersion>false</packageWithVersion>
+ <model>src/main/mdo/reporting.mdo</model>
+ </configuration>
<executions>
<execution>
+ <id>modello-java</id>
<goals>
- <goal>xpp3-writer</goal>
<goal>java</goal>
+ <goal>jpox-metadata-class</goal>
+ <!--
+ <goal>xpp3-writer</goal>
<goal>xpp3-reader</goal>
+ -->
+ </goals>
+ </execution>
+ <execution>
+ <id>jpox-jdo-mapping</id>
+ <goals>
+ <goal>jpox-jdo-mapping</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${basedir}/target/classes/org/apache/maven/archiva/reporting/model/</outputDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>jpox-maven-plugin</artifactId>
+ <version>1.1.6-SNAPSHOT</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>enhance</goal>
</goals>
</execution>
</executions>
- <configuration>
- <version>1.0.0</version>
- <model>src/main/mdo/reporting.mdo</model>
- </configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ReportingException
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ReportingException
+ extends Exception
+{
+
+ public ReportingException()
+ {
+ }
+
+ public ReportingException( String message )
+ {
+ super( message );
+ }
+
+ public ReportingException( Throwable cause )
+ {
+ super( cause );
+ }
+
+ public ReportingException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
+
+import java.util.List;
+
+import javax.jdo.Extent;
+import javax.jdo.JDOException;
+import javax.jdo.JDOHelper;
+import javax.jdo.JDOObjectNotFoundException;
+import javax.jdo.JDOUserException;
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
+import javax.jdo.Query;
+import javax.jdo.Transaction;
+
+/**
+ * AbstractJdoDatabase - Base class for the JDO-backed result databases.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractJdoDatabase
+ implements Initializable
+{
+ /**
+ * @plexus.requirement role-hint="archiva"
+ */
+ private JdoFactory jdoFactory;
+
+ private PersistenceManagerFactory pmf;
+
+ // -------------------------------------------------------------------
+ // JPOX / JDO Specifics.
+ // -------------------------------------------------------------------
+
+ protected List getAllObjects( Class clazz, String ordering )
+ {
+ PersistenceManager pm = getPersistenceManager();
+ Transaction tx = pm.currentTransaction();
+
+ try
+ {
+ tx.begin();
+
+ Extent extent = pm.getExtent( clazz, true );
+
+ Query query = pm.newQuery( extent );
+
+ if ( ordering != null )
+ {
+ query.setOrdering( ordering );
+ }
+
+// for ( Iterator i = fetchGroups.iterator(); i.hasNext(); )
+// {
+// pm.getFetchPlan().addGroup( (String) i.next() );
+// }
+
+ List result = (List) query.execute();
+
+ result = (List) pm.detachCopyAll( result );
+
+ tx.commit();
+
+ return result;
+ }
+ finally
+ {
+ rollbackIfActive( tx );
+ }
+ }
+
+ protected Object getObjectByKey( Class clazz, Object key )
+ throws JDOObjectNotFoundException, JDOException
+ {
+ if ( key == null )
+ {
+ throw new JDOException( "Unable to get object from jdo using null key." );
+ }
+
+ PersistenceManager pm = getPersistenceManager();
+ Transaction tx = pm.currentTransaction();
+
+ try
+ {
+ tx.begin();
+
+ // if ( fetchGroup != null )
+ // {
+ // pm.getFetchPlan().addGroup( fetchGroup );
+ // }
+
+ Object objectId = pm.newObjectIdInstance( clazz, key.toString() );
+
+ Object object = pm.getObjectById( objectId );
+
+ object = pm.detachCopy( object );
+
+ tx.commit();
+
+ return object;
+ }
+ finally
+ {
+ rollbackIfActive( tx );
+ }
+ }
+
+ public void initialize()
+ throws InitializationException
+ {
+ pmf = jdoFactory.getPersistenceManagerFactory();
+ }
+
+ protected void removeObject( Object o )
+ {
+ PersistenceManager pm = getPersistenceManager();
+ Transaction tx = pm.currentTransaction();
+
+ try
+ {
+ tx.begin();
+
+ o = pm.getObjectById( pm.getObjectId( o ) );
+
+ pm.deletePersistent( o );
+
+ tx.commit();
+ }
+ finally
+ {
+ rollbackIfActive( tx );
+ }
+ }
+
+ protected Object saveObject( Object object )
+ {
+ return saveObject( object, null );
+ }
+
+ protected Object saveObject( Object object, String[] fetchGroups )
+ throws JDOException
+ {
+ PersistenceManager pm = getPersistenceManager();
+ Transaction tx = pm.currentTransaction();
+
+ try
+ {
+ tx.begin();
+
+ if ( ( JDOHelper.getObjectId( object ) != null ) && !JDOHelper.isDetached( object ) )
+ {
+ throw new JDOException( "Existing object is not detached: " + object );
+ }
+
+ if ( fetchGroups != null )
+ {
+ for ( int i = 0; i < fetchGroups.length; i++ )
+ {
+ pm.getFetchPlan().addGroup( fetchGroups[i] );
+ }
+ }
+
+ pm.makePersistent( object );
+
+ object = pm.detachCopy( object );
+
+ tx.commit();
+
+ return object;
+ }
+ finally
+ {
+ rollbackIfActive( tx );
+ }
+ }
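+
+ /* Subclasses follow a detach/modify/save cycle with these helpers; a sketch
+ * (ArtifactResults and key are illustrative, from a results database subclass):
+ *
+ * ArtifactResults results = (ArtifactResults) getObjectByKey( ArtifactResults.class, key );
+ * results.addFailure( result ); // mutate the detached copy
+ * saveObject( results ); // reattach and persist the changes
+ *
+ * Note that saveObject() rejects an object that already has a JDO identity
+ * but is not a detached copy.
+ */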
+
+ private PersistenceManager getPersistenceManager()
+ {
+ PersistenceManager pm = pmf.getPersistenceManager();
+
+ pm.getFetchPlan().setMaxFetchDepth( -1 );
+
+ return pm;
+ }
+
+ protected static void closePersistenceManager( PersistenceManager pm )
+ {
+ try
+ {
+ pm.close();
+ }
+ catch ( JDOUserException e )
+ {
+ // ignore
+ }
+ }
+
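+ /**
+ * Rolls the transaction back if it is still active (i.e. commit was never reached),
+ * and closes the associated PersistenceManager in either case.
+ */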
+ protected static void rollbackIfActive( Transaction tx )
+ {
+ PersistenceManager pm = tx.getPersistenceManager();
+
+ try
+ {
+ if ( tx.isActive() )
+ {
+ tx.rollback();
+ }
+ }
+ finally
+ {
+ closePersistenceManager( pm );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.Result;
+
+/**
+ * AbstractResultsDatabase
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractResultsDatabase
+ extends AbstractJdoDatabase
+{
+ /**
+ * <p>
+ * Get the number of failures in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of failures in the database.
+ */
+ public abstract int getNumFailures();
+
+ /**
+ * <p>
+ * Get the number of warnings in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of warnings in the database.
+ */
+ public abstract int getNumWarnings();
+
+ /**
+ * <p>
+ * Get the number of notices in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of notices in the database.
+ */
+ public abstract int getNumNotices();
+
+ protected static Result createResult( String processor, String problem, String reason )
+ {
+ Result result = new Result();
+ result.setProcessor( processor );
+ result.setProblem( problem );
+ result.setReason( reason );
+ return result;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.ArtifactResultsKey;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.jdo.JDOObjectNotFoundException;
+
+/**
+ * ArtifactResultsDatabase - Database of ArtifactResults.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase"
+ */
+public class ArtifactResultsDatabase
+ extends AbstractResultsDatabase
+{
+ // -------------------------------------------------------------------
+ // ArtifactResults methods.
+ // -------------------------------------------------------------------
+
+ public static final String ROLE = ArtifactResultsDatabase.class.getName();
+
+ public void addFailure( Artifact artifact, String processor, String problem, String reason )
+ {
+ ArtifactResults results = getArtifactResults( artifact );
+ Result result = createResult( processor, problem, reason );
+
+ if ( !results.getFailures().contains( result ) )
+ {
+ results.addFailure( result );
+ }
+
+ saveObject( results );
+ }
+
+ public void addNotice( Artifact artifact, String processor, String problem, String reason )
+ {
+ ArtifactResults results = getArtifactResults( artifact );
+ Result result = createResult( processor, problem, reason );
+
+ if ( !results.getNotices().contains( result ) )
+ {
+ results.addNotice( result );
+ }
+
+ saveObject( results );
+ }
+
+ public void addWarning( Artifact artifact, String processor, String problem, String reason )
+ {
+ ArtifactResults results = getArtifactResults( artifact );
+ Result result = createResult( processor, problem, reason );
+
+ if ( !results.getWarnings().contains( result ) )
+ {
+ results.addWarning( result );
+ }
+
+ saveObject( results );
+ }
+
+ public void clearResults( ArtifactResults results )
+ {
+ results.getFailures().clear();
+ results.getWarnings().clear();
+ results.getNotices().clear();
+
+ saveObject( results );
+ }
+
+ public List getAllArtifactResults()
+ {
+ return getAllObjects( ArtifactResults.class, null );
+ }
+
+ public Iterator getIterator()
+ {
+ List allartifacts = getAllArtifactResults();
+ if ( allartifacts == null )
+ {
+ return Collections.EMPTY_LIST.iterator();
+ }
+
+ return allartifacts.iterator();
+ }
+
+ public void remove( ArtifactResults results )
+ {
+ removeObject( results );
+ }
+
+ public void remove( Artifact artifact )
+ {
+ try
+ {
+ ArtifactResults results = lookupArtifactResults( artifact );
+ remove( results );
+ }
+ catch ( JDOObjectNotFoundException e )
+ {
+ // nothing to do.
+ }
+ }
+
+ /**
+ * Get an {@link ArtifactResults} from the store.
+ * If the store does not have one, create it.
+ *
+ * Equivalent to calling {@link #lookupArtifactResults(Artifact)} and, if it is
+ * not found, falling back to {@link #createArtifactResults(Artifact)}.
+ *
+ * @param artifact the artifact information
+ * @return the ArtifactResults object (may not be in the database yet, so don't forget to {@link #saveObject(Object)})
+ * @see #lookupArtifactResults(Artifact)
+ * @see #createArtifactResults(Artifact)
+ */
+ public ArtifactResults getArtifactResults( Artifact artifact )
+ {
+ ArtifactResults results;
+
+ try
+ {
+ results = lookupArtifactResults( artifact );
+ }
+ catch ( JDOObjectNotFoundException e )
+ {
+ results = createArtifactResults( artifact );
+ }
+
+ return results;
+ }
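+
+ /* Typical flow (sketch): report processors call addFailure()/addWarning()/addNotice(),
+ * each of which uses getArtifactResults() to fetch-or-create the per-artifact record:
+ *
+ * database.addFailure( artifact, "checksum", "missing-md5", "MD5 checksum file is missing." );
+ *
+ * The processor/problem/reason strings above are illustrative, not fixed constants.
+ */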
+
+ /**
+ * Create a new {@link ArtifactResults} object from the provided Artifact information.
+ *
+ * @param artifact the artifact information.
+ * @return the new {@link ArtifactResults} object.
+ * @see #getArtifactResults(Artifact)
+ * @see #lookupArtifactResults(Artifact)
+ */
+ private ArtifactResults createArtifactResults( Artifact artifact )
+ {
+ /* The funky StringUtils.defaultString() is used because of database constraints.
+ * The ArtifactResults object has a composite primary key consisting of groupId, artifactId, version,
+ * type, and classifier.
+ * This also means that none of those fields may be null. However, that doesn't eliminate the
+ * ability to have an empty string in place of a null.
+ */
+
+ ArtifactResults results = new ArtifactResults();
+ results.setGroupId( StringUtils.defaultString( artifact.getGroupId() ) );
+ results.setArtifactId( StringUtils.defaultString( artifact.getArtifactId() ) );
+ results.setVersion( StringUtils.defaultString( artifact.getVersion() ) );
+ results.setType( StringUtils.defaultString( artifact.getType() ) );
+ results.setClassifier( StringUtils.defaultString( artifact.getClassifier() ) );
+
+ return results;
+ }
+
+ /**
+ * Lookup the {@link ArtifactResults} in the JDO store from the information in
+ * the provided Artifact.
+ *
+ * @param artifact the artifact information.
+ * @return the previously saved {@link ArtifactResults} from the JDO store.
+ * @throws JDOObjectNotFoundException if the {@link ArtifactResults} are not found.
+ * @see #getArtifactResults(Artifact)
+ * @see #createArtifactResults(Artifact)
+ */
+ private ArtifactResults lookupArtifactResults( Artifact artifact )
+ throws JDOObjectNotFoundException
+ {
+ /* The funky StringUtils.defaultString() is used because of database constraints.
+ * The ArtifactResults object has a composite primary key consisting of groupId, artifactId, version,
+ * type, and classifier.
+ * This also means that none of those fields may be null. However, that doesn't eliminate the
+ * ability to have an empty string in place of a null.
+ */
+
+ ArtifactResultsKey key = new ArtifactResultsKey();
+ key.groupId = StringUtils.defaultString( artifact.getGroupId() );
+ key.artifactId = StringUtils.defaultString( artifact.getArtifactId() );
+ key.version = StringUtils.defaultString( artifact.getVersion() );
+ key.type = StringUtils.defaultString( artifact.getType() );
+ key.classifier = StringUtils.defaultString( artifact.getClassifier() );
+
+ return (ArtifactResults) getObjectByKey( ArtifactResults.class, key );
+ }
+
+ public int getNumFailures()
+ {
+ int count = 0;
+ for ( Iterator it = getIterator(); it.hasNext(); )
+ {
+ ArtifactResults results = (ArtifactResults) it.next();
+ count += results.getFailures().size();
+ }
+ return count;
+ }
+
+ public int getNumNotices()
+ {
+ int count = 0;
+ for ( Iterator it = getIterator(); it.hasNext(); )
+ {
+ ArtifactResults results = (ArtifactResults) it.next();
+ count += results.getNotices().size();
+ }
+ return count;
+ }
+
+ public int getNumWarnings()
+ {
+ int count = 0;
+ for ( Iterator it = getIterator(); it.hasNext(); )
+ {
+ ArtifactResults results = (ArtifactResults) it.next();
+ count += results.getWarnings().size();
+ }
+ return count;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.MetadataResultsKey;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.jdo.JDOObjectNotFoundException;
+
+/**
+ * MetadataResultsDatabase
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.MetadataResultsDatabase"
+ */
+public class MetadataResultsDatabase
+ extends AbstractResultsDatabase
+{
+ public static final String ROLE = MetadataResultsDatabase.class.getName();
+
+ public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
+ {
+ MetadataResults results = getMetadataResults( metadata );
+ Result result = createResult( processor, problem, reason );
+
+ if ( !results.getFailures().contains( result ) )
+ {
+ results.addFailure( result );
+ }
+
+ saveObject( results );
+ }
+
+ public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
+ {
+ MetadataResults results = getMetadataResults( metadata );
+ Result result = createResult( processor, problem, reason );
+
+ if ( !results.getWarnings().contains( result ) )
+ {
+ results.addWarning( result );
+ }
+
+ saveObject( results );
+ }
+
+ public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
+ {
+ MetadataResults results = getMetadataResults( metadata );
+ Result result = createResult( processor, problem, reason );
+
+ if ( !results.getNotices().contains( result ) )
+ {
+ results.addNotice( result );
+ }
+
+ saveObject( results );
+ }
+
+ public void clearResults( MetadataResults results )
+ {
+ results.getFailures().clear();
+ results.getWarnings().clear();
+ results.getNotices().clear();
+
+ saveObject( results );
+ }
+
+ public List getAllMetadataResults()
+ {
+ return getAllObjects( MetadataResults.class, null );
+ }
+
+ public Iterator getIterator()
+ {
+ List allmetadatas = getAllMetadataResults();
+ if ( allmetadatas == null )
+ {
+ return Collections.EMPTY_LIST.iterator();
+ }
+
+ return allmetadatas.iterator();
+ }
+
+ public void remove( MetadataResults results )
+ {
+ removeObject( results );
+ }
+
+ public void remove( RepositoryMetadata metadata )
+ {
+ try
+ {
+ MetadataResults results = lookupMetadataResults( metadata );
+ remove( results );
+ }
+ catch ( JDOObjectNotFoundException e )
+ {
+ // nothing to do.
+ }
+ }
+
+ public MetadataResults getMetadataResults( RepositoryMetadata metadata )
+ {
+ MetadataResults results;
+
+ try
+ {
+ results = lookupMetadataResults( metadata );
+ }
+ catch ( JDOObjectNotFoundException e )
+ {
+ results = createMetadataResults( metadata );
+ }
+
+ return results;
+ }
+
+ private MetadataResults createMetadataResults( RepositoryMetadata metadata )
+ {
+ /* The funky StringUtils.defaultString() is used because of database constraints.
+ * The MetadataResults object has a composite primary key consisting of groupId, artifactId, and version.
+ * This also means that none of those fields may be null. However, that doesn't eliminate the
+ * ability to have an empty string in place of a null.
+ */
+
+ MetadataResults results = new MetadataResults();
+ results.setGroupId( StringUtils.defaultString( metadata.getGroupId() ) );
+ results.setArtifactId( StringUtils.defaultString( metadata.getArtifactId() ) );
+ results.setVersion( StringUtils.defaultString( metadata.getBaseVersion() ) );
+
+ return results;
+ }
+
+ private MetadataResults lookupMetadataResults( RepositoryMetadata metadata )
+ {
+ /* The funky StringUtils.defaultString() is used because of database constraints.
+ * The MetadataResults object has a composite primary key consisting of groupId, artifactId, and version.
+ * This also means that none of those fields may be null. However, that doesn't eliminate the
+ * ability to have an empty string in place of a null.
+ */
+
+ MetadataResultsKey key = new MetadataResultsKey();
+ key.groupId = StringUtils.defaultString( metadata.getGroupId(), "" );
+ key.artifactId = StringUtils.defaultString( metadata.getArtifactId(), "" );
+ key.version = StringUtils.defaultString( metadata.getBaseVersion(), "" );
+
+ return (MetadataResults) getObjectByKey( MetadataResults.class, key );
+ }
+
+ public int getNumFailures()
+ {
+ int count = 0;
+ for ( Iterator it = getIterator(); it.hasNext(); )
+ {
+ MetadataResults results = (MetadataResults) it.next();
+ count += results.getFailures().size();
+ }
+ return count;
+ }
+
+ public int getNumNotices()
+ {
+ int count = 0;
+ for ( Iterator it = getIterator(); it.hasNext(); )
+ {
+ MetadataResults results = (MetadataResults) it.next();
+ count += results.getNotices().size();
+ }
+ return count;
+ }
+
+ public int getNumWarnings()
+ {
+ int count = 0;
+ for ( Iterator it = getIterator(); it.hasNext(); )
+ {
+ MetadataResults results = (MetadataResults) it.next();
+ count += results.getWarnings().size();
+ }
+ return count;
+ }
+}
* under the License.
*/
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.Date;
-import java.util.HashMap;
import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Set;
/**
+ * The main reporting database, which delegates to the per-type artifact and metadata results databases.
+ *
* @todo i18n, including message formatting and parameterisation
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.ReportingDatabase"
*/
public class ReportingDatabase
{
- private final Reporting reporting;
-
- private Map artifactMap;
-
- private Map metadataMap;
-
- private int numFailures;
-
- private int numWarnings;
-
- private ArtifactRepository repository;
-
- private boolean inProgress;
-
- private long startTime;
-
- private final ReportGroup reportGroup;
-
- private Set metadataWithProblems;
-
- private Map filteredDatabases = new HashMap();
-
- private int numNotices;
-
- public ReportingDatabase( ReportGroup reportGroup )
- {
- this( reportGroup, new Reporting() );
- }
-
- public ReportingDatabase( ReportGroup reportGroup, Reporting reporting )
- {
- this( reportGroup, reporting, null );
- }
-
- public ReportingDatabase( ReportGroup reportGroup, ArtifactRepository repository )
- {
- this( reportGroup, new Reporting(), repository );
- }
-
- public ReportingDatabase( ReportGroup reportGroup, Reporting reporting, ArtifactRepository repository )
- {
- this.reportGroup = reportGroup;
-
- this.reporting = reporting;
-
- this.repository = repository;
-
- initArtifactMap();
-
- initMetadataMap();
- }
-
- public void addFailure( Artifact artifact, String processor, String problem, String reason )
- {
- ArtifactResults results = getArtifactResults( artifact );
- Result result = createResult( processor, problem, reason );
- if ( !results.getFailures().contains( result ) )
- {
- results.addFailure( result );
- numFailures++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addFailure( artifact, processor, problem, reason );
- }
- }
-
- public void addNotice( Artifact artifact, String processor, String problem, String reason )
- {
- ArtifactResults results = getArtifactResults( artifact );
- Result result = createResult( processor, problem, reason );
- if ( !results.getNotices().contains( result ) )
- {
- results.addNotice( result );
- numNotices++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addNotice( artifact, processor, problem, reason );
- }
- }
-
- public void addWarning( Artifact artifact, String processor, String problem, String reason )
- {
- ArtifactResults results = getArtifactResults( artifact );
- Result result = createResult( processor, problem, reason );
- if ( !results.getWarnings().contains( result ) )
- {
- results.addWarning( result );
- numWarnings++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addWarning( artifact, processor, problem, reason );
- }
- }
-
- ArtifactResults getArtifactResults( Artifact artifact )
- {
- return getArtifactResults( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
- artifact.getType(), artifact.getClassifier() );
- }
-
- private ArtifactResults getArtifactResults( String groupId, String artifactId, String version, String type,
- String classifier )
- {
- Map artifactMap = this.artifactMap;
-
- String key = getArtifactKey( groupId, artifactId, version, type, classifier );
- ArtifactResults results = (ArtifactResults) artifactMap.get( key );
- if ( results == null )
- {
- results = new ArtifactResults();
- results.setArtifactId( artifactId );
- results.setClassifier( classifier );
- results.setGroupId( groupId );
- results.setType( type );
- results.setVersion( version );
-
- artifactMap.put( key, results );
- reporting.getArtifacts().add( results );
- }
-
- return results;
- }
-
- private void initArtifactMap()
- {
- Map map = new HashMap();
- for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
- {
- ArtifactResults result = (ArtifactResults) i.next();
-
- String key = getArtifactKey( result.getGroupId(), result.getArtifactId(), result.getVersion(),
- result.getType(), result.getClassifier() );
- map.put( key, result );
-
- numFailures += result.getFailures().size();
- numWarnings += result.getWarnings().size();
- numNotices += result.getNotices().size();
- }
- artifactMap = map;
- }
-
- private static String getArtifactKey( String groupId, String artifactId, String version, String type,
- String classifier )
- {
- return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
- }
-
- private static Result createResult( String processor, String problem, String reason )
- {
- Result result = new Result();
- result.setProcessor( processor );
- result.setProblem( problem );
- result.setReason( reason );
- return result;
- }
-
- public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
- {
- MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
- if ( !metadataWithProblems.contains( results ) )
- {
- metadataWithProblems.add( results );
- }
- Result result = createResult( processor, problem, reason );
- if ( !results.getFailures().contains( result ) )
- {
- results.addFailure( result );
- numFailures++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addFailure( metadata, processor, problem, reason );
- }
- }
-
- public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
- {
- MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
- if ( !metadataWithProblems.contains( results ) )
- {
- metadataWithProblems.add( results );
- }
- Result result = createResult( processor, problem, reason );
- if ( !results.getWarnings().contains( result ) )
- {
- results.addWarning( result );
- numWarnings++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addWarning( metadata, processor, problem, reason );
- }
- }
-
- public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
- {
- MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
- if ( !metadataWithProblems.contains( results ) )
- {
- metadataWithProblems.add( results );
- }
- Result result = createResult( processor, problem, reason );
- if ( !results.getNotices().contains( result ) )
- {
- results.addNotice( result );
- numNotices++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addNotice( metadata, processor, problem, reason );
- }
- }
-
- public Set getMetadataWithProblems()
- {
- return metadataWithProblems;
- }
-
- private void initMetadataMap()
- {
- Map map = new HashMap();
- Set problems = new LinkedHashSet();
-
- for ( Iterator i = reporting.getMetadata().iterator(); i.hasNext(); )
- {
- MetadataResults result = (MetadataResults) i.next();
-
- String key = getMetadataKey( result.getGroupId(), result.getArtifactId(), result.getVersion() );
-
- map.put( key, result );
-
- numFailures += result.getFailures().size();
- numWarnings += result.getWarnings().size();
- numNotices += result.getNotices().size();
-
- if ( !result.getFailures().isEmpty() || !result.getWarnings().isEmpty() || !result.getNotices().isEmpty() )
- {
- problems.add( result );
- }
- }
- metadataMap = map;
- metadataWithProblems = problems;
- }
+    public static final String ROLE = ReportingDatabase.class.getName();
+
- private static String getMetadataKey( String groupId, String artifactId, String version )
- {
- return groupId + ":" + artifactId + ":" + version;
- }
-
- public int getNumFailures()
- {
- return numFailures;
- }
-
- public int getNumWarnings()
- {
- return numWarnings;
- }
+ /**
+ * @plexus.requirement
+ */
+    private ArtifactResultsDatabase artifactDatabase;
+
- public Reporting getReporting()
- {
- return reporting;
- }
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResultsDatabase metadataDatabase;
public Iterator getArtifactIterator()
{
- return reporting.getArtifacts().iterator();
+ return artifactDatabase.getIterator();
}
public Iterator getMetadataIterator()
{
- return reporting.getMetadata().iterator();
+ return metadataDatabase.getIterator();
}
- public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp )
+ public void clear()
{
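+        // now a no-op: results live in the artifact and metadata results
+        // databases, which manage their own cleanup (see, for example,
+        // MetadataResultsDatabase.clearResults)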
- String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
- Map map = metadataMap;
- MetadataResults results = (MetadataResults) map.get( key );
- return results != null && results.getLastModified() >= timestamp;
}
/**
- * Make sure the metadata record exists, but remove any previous reports in preparation for adding new ones.
- *
- * @param metadata the metadata
- * @param lastModified the modification time of the file being tracked
+ * <p>
+ * Get the number of failures in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of failures in the database.
*/
- public void cleanMetadata( RepositoryMetadata metadata, long lastModified )
- {
- MetadataResults results = getMetadataResults( metadata, lastModified );
-
- results.setLastModified( lastModified );
-
- numFailures -= results.getFailures().size();
- results.getFailures().clear();
-
- numWarnings -= results.getWarnings().size();
- results.getWarnings().clear();
-
- numNotices -= results.getWarnings().size();
- results.getNotices().clear();
-
- metadataWithProblems.remove( results );
- }
-
- MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified )
- {
- return getMetadataResults( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion(),
- lastModified );
- }
-
- private MetadataResults getMetadataResults( String groupId, String artifactId, String baseVersion,
- long lastModified )
- {
- String key = getMetadataKey( groupId, artifactId, baseVersion );
- Map metadataMap = this.metadataMap;
- MetadataResults results = (MetadataResults) metadataMap.get( key );
- if ( results == null )
- {
- results = new MetadataResults();
- results.setArtifactId( artifactId );
- results.setGroupId( groupId );
- results.setVersion( baseVersion );
- results.setLastModified( lastModified );
-
- metadataMap.put( key, results );
- reporting.getMetadata().add( results );
- }
- return results;
- }
-
- public void removeArtifact( Artifact artifact )
- {
- Map map = artifactMap;
-
- String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
- artifact.getType(), artifact.getClassifier() );
- ArtifactResults results = (ArtifactResults) map.get( key );
- if ( results != null )
- {
- for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
- {
- if ( results.equals( i.next() ) )
- {
- i.remove();
- }
- }
-
- numFailures -= results.getFailures().size();
- numWarnings -= results.getWarnings().size();
- numNotices -= results.getNotices().size();
-
- map.remove( key );
- }
- }
-
- public ArtifactRepository getRepository()
- {
- return repository;
- }
-
- public boolean isInProgress()
- {
- return inProgress;
- }
-
- public void setInProgress( boolean inProgress )
- {
- this.inProgress = inProgress;
-
- if ( inProgress )
- {
- startTime = System.currentTimeMillis();
- }
- }
-
- public void clear()
- {
- // clear the values rather than destroy the instance so that the "inProgress" indicator is in tact.
- numWarnings = 0;
- numNotices = 0;
- numFailures = 0;
-
- artifactMap.clear();
- metadataMap.clear();
- metadataWithProblems.clear();
- filteredDatabases.clear();
-
- reporting.getArtifacts().clear();
- reporting.getMetadata().clear();
-
- updateTimings();
- }
-
- public void setStartTime( long startTime )
- {
- this.startTime = startTime;
- }
-
- public long getStartTime()
- {
- return startTime;
- }
-
- public void updateTimings()
- {
- long startTime = getStartTime();
- Date endTime = new Date();
- if ( startTime > 0 )
- {
- getReporting().setExecutionTime( endTime.getTime() - startTime );
- }
- getReporting().setLastModified( endTime.getTime() );
- }
-
- public ReportGroup getReportGroup()
+ public int getNumFailures()
{
- return reportGroup;
+ int count = 0;
+ count += artifactDatabase.getNumFailures();
+ count += metadataDatabase.getNumFailures();
+ return count;
}
- public ReportingDatabase getFilteredDatabase( String filter )
+ /**
+ * <p>
+ * Get the number of notices in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of notices in the database.
+ */
+ public int getNumNotices()
{
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( filter );
-
- if ( reportingDatabase == null )
- {
- reportingDatabase = new ReportingDatabase( reportGroup, repository );
-
- Reporting reporting = reportingDatabase.getReporting();
- reporting.setExecutionTime( this.reporting.getExecutionTime() );
- reporting.setLastModified( this.reporting.getLastModified() );
-
- for ( Iterator i = this.reporting.getArtifacts().iterator(); i.hasNext(); )
- {
- ArtifactResults results = (ArtifactResults) i.next();
- ArtifactResults targetResults = null;
- for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createArtifactResults( reportingDatabase, results );
- }
-
- targetResults.addFailure( result );
- reportingDatabase.numFailures++;
- }
- }
- for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createArtifactResults( reportingDatabase, results );
- }
-
- targetResults.addWarning( result );
- reportingDatabase.numWarnings++;
- }
- }
- for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createArtifactResults( reportingDatabase, results );
- }
-
- targetResults.addNotice( result );
- reportingDatabase.numNotices++;
- }
- }
- }
- for ( Iterator i = this.reporting.getMetadata().iterator(); i.hasNext(); )
- {
- MetadataResults results = (MetadataResults) i.next();
- MetadataResults targetResults = null;
- for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createMetadataResults( reportingDatabase, results );
- }
-
- targetResults.addFailure( result );
- reportingDatabase.numFailures++;
- }
- }
- for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createMetadataResults( reportingDatabase, results );
- }
-
- targetResults.addWarning( result );
- reportingDatabase.numWarnings++;
- }
- }
- for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createMetadataResults( reportingDatabase, results );
- }
-
- targetResults.addNotice( result );
- reportingDatabase.numNotices++;
- }
- }
- }
-
- filteredDatabases.put( filter, reportingDatabase );
- }
-
- return reportingDatabase;
+ int count = 0;
+ count += artifactDatabase.getNumNotices();
+ count += metadataDatabase.getNumNotices();
+ return count;
}
- private static MetadataResults createMetadataResults( ReportingDatabase reportingDatabase, MetadataResults results )
+ /**
+ * <p>
+ * Get the number of warnings in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource-intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of warnings in the database.
+ */
+ public int getNumWarnings()
{
- MetadataResults targetResults = reportingDatabase.getMetadataResults( results.getGroupId(),
- results.getArtifactId(),
- results.getVersion(),
- results.getLastModified() );
- reportingDatabase.metadataWithProblems.add( targetResults );
- return targetResults;
+ int count = 0;
+ count += artifactDatabase.getNumWarnings();
+ count += metadataDatabase.getNumWarnings();
+ return count;
}
- private static ArtifactResults createArtifactResults( ReportingDatabase reportingDatabase, ArtifactResults results )
+ public ArtifactResultsDatabase getArtifactDatabase()
{
- return reportingDatabase.getArtifactResults( results.getGroupId(), results.getArtifactId(),
- results.getVersion(), results.getType(), results.getClassifier() );
+ return artifactDatabase;
}
- public int getNumNotices()
+ public MetadataResultsDatabase getMetadataDatabase()
{
- return numNotices;
+ return metadataDatabase;
}
}
+++ /dev/null
-package org.apache.maven.archiva.reporting.executor;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStore;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.InvalidArtifactRTException;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-import org.apache.maven.model.Model;
-import org.apache.maven.project.MavenProject;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.apache.maven.project.ProjectBuildingException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Report executor implementation.
- *
- * @todo should the report set be limitable by configuration?
- * @plexus.component
- */
-public class DefaultReportExecutor
- extends AbstractLogEnabled
- implements ReportExecutor
-{
- /**
- * @plexus.requirement
- */
- private MavenProjectBuilder projectBuilder;
-
- /**
- * @plexus.requirement
- */
- private ReportingStore reportingStore;
-
- /**
- * @plexus.requirement
- */
- private ArtifactFactory artifactFactory;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
- */
- private Map artifactDiscoverers;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
- */
- private Map metadataDiscoverers;
-
- private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
- public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
- throws ReportingStoreException
- {
- ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
- for ( Iterator i = metadata.iterator(); i.hasNext(); )
- {
- RepositoryMetadata repositoryMetadata = (RepositoryMetadata) i.next();
-
- File file =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
- reporter.cleanMetadata( repositoryMetadata, file.lastModified() );
-
- reportGroup.processMetadata( repositoryMetadata, repository, reporter );
- }
-
- reportingStore.storeReports( reporter, repository );
- }
-
- public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
- throws ReportingStoreException
- {
- ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
-
- Model model = null;
- try
- {
- Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
- artifact.getArtifactId(),
- artifact.getVersion() );
- MavenProject project =
- projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
-
- model = project.getModel();
- }
- catch ( InvalidArtifactRTException e )
- {
- reporter.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
- }
- catch ( ProjectBuildingException e )
- {
- reporter.addWarning( artifact, null, null, "Error reading project model: " + e );
- }
-
- reporter.removeArtifact( artifact );
-
- reportGroup.processArtifact( artifact, model, reporter );
- }
-
- reportingStore.storeReports( reporter, repository );
- }
-
- public ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
- throws ReportingStoreException
- {
- getLogger().debug(
- "Reading previous report database " + reportGroup.getName() + " from repository " + repository.getId() );
- return reportingStore.getReportsFromStore( repository, reportGroup );
- }
-
- public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
- ArtifactFilter filter )
- throws DiscovererException, ReportingStoreException
- {
- // Flush (as in toilet, not store) the report database
- ReportingDatabase database = getReportDatabase( repository, reportGroup );
- database.clear();
-
- // Discovery process
- String layoutProperty = getRepositoryLayout( repository.getLayout() );
- ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
-
- // Save some memory by not tracking paths we won't use
- // TODO: Plexus CDC should be able to inject this configuration
- discoverer.setTrackOmittedPaths( false );
-
- List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
- if ( !artifacts.isEmpty() )
- {
- getLogger().info( "Discovered " + artifacts.size() + " artifacts" );
-
- // Work through these in batches, then flush the project cache.
- for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
- {
- int end = j + ARTIFACT_BUFFER_SIZE;
- List currentArtifacts = artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
- // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
- // run the reports.
- runArtifactReports( reportGroup, currentArtifacts, repository );
-
- // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
- // around that. TODO: remove when it is configurable
- flushProjectBuilderCacheHack();
- }
- }
-
- MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
- List metadata =
- metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
-
- if ( !metadata.isEmpty() )
- {
- getLogger().info( "Discovered " + metadata.size() + " metadata files" );
-
- // run the reports
- runMetadataReports( reportGroup, metadata, repository );
- }
- }
-
- private String getRepositoryLayout( ArtifactRepositoryLayout layout )
- {
- // gross limitation that there is no reverse lookup of the hint for the layout.
- if ( layout.getClass().equals( DefaultRepositoryLayout.class ) )
- {
- return "default";
- }
- else if ( layout.getClass().equals( LegacyRepositoryLayout.class ) )
- {
- return "legacy";
- }
- else
- {
- throw new IllegalArgumentException( "Unknown layout: " + layout );
- }
- }
-
- private void flushProjectBuilderCacheHack()
- {
- try
- {
- if ( projectBuilder != null )
- {
- java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
- f.setAccessible( true );
- Map cache = (Map) f.get( projectBuilder );
- cache.clear();
-
- f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
- f.setAccessible( true );
- cache = (Map) f.get( projectBuilder );
- cache.clear();
- }
- }
- catch ( NoSuchFieldException e )
- {
- throw new RuntimeException( e );
- }
- catch ( IllegalAccessException e )
- {
- throw new RuntimeException( e );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting.executor;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.List;
-
-/**
- * Executes a report or report group.
- */
-public interface ReportExecutor
-{
- /**
- * Plexus component role name.
- */
- String ROLE = ReportExecutor.class.getName();
-
- /**
- * Run reports on a set of metadata.
- *
- * @param reportGroup the report set to run
- * @param metadata the RepositoryMetadata objects to report on
- * @param repository the repository that they come from
- * @throws org.apache.maven.archiva.reporting.store.ReportingStoreException
- * if there is a problem reading/writing the report database
- */
- public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
- throws ReportingStoreException;
-
- /**
- * Run reports on a set of artifacts.
- *
- * @param reportGroup the report set to run
- * @param artifacts the Artifact objects to report on
- * @param repository the repository that they come from
- * @throws ReportingStoreException if there is a problem reading/writing the report database
- */
- public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
- throws ReportingStoreException;
-
- /**
- * Get the report database in use for a given repository.
- *
- * @param repository the repository
- * @param reportGroup the report set to run
- * @return the report database
- * @throws ReportingStoreException if there is a problem reading the report database
- */
- ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
- throws ReportingStoreException;
-
- /**
- * Run the artifact and metadata reports for the repository. The artifacts and metadata will be discovered.
- *
- * @param repository the repository to run from
- * @param blacklistedPatterns the patterns to exclude during discovery
- * @param filter the filter to use during discovery to get a consistent list of artifacts
- * @param reportGroup the report set to run
- * @throws ReportingStoreException if there is a problem reading/writing the report database
- * @throws org.apache.maven.archiva.discoverer.DiscovererException
- * if there is a problem finding the artifacts and metadata to report on
- */
- public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
- ArtifactFilter filter )
- throws DiscovererException, ReportingStoreException;
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Implementation of a reporting filter. Artifacts already in the database are ignored.
- */
-public class ReportingMetadataFilter
- implements MetadataFilter
-{
- private final ReportingDatabase reporter;
-
- public ReportingMetadataFilter( ReportingDatabase reporter )
- {
- this.reporter = reporter;
- }
-
- public boolean include( RepositoryMetadata metadata, long timestamp )
- {
- return !reporter.isMetadataUpToDate( metadata, timestamp );
- }
-}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
import org.apache.maven.artifact.Artifact;
*/
private Map metadataReports;
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase )
+ public void processArtifact( Artifact artifact, Model model )
{
for ( Iterator i = artifactReports.entrySet().iterator(); i.hasNext(); )
{
{
ArtifactReportProcessor report = (ArtifactReportProcessor) entry.getValue();
- report.processArtifact( artifact, model, reportingDatabase );
+ report.processArtifact( artifact, model );
}
}
}
- public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
- ReportingDatabase reportingDatabase )
+ public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository )
{
for ( Iterator i = metadataReports.entrySet().iterator(); i.hasNext(); )
{
{
MetadataReportProcessor report = (MetadataReportProcessor) entry.getValue();
- report.processMetadata( repositoryMetadata, repository, reportingDatabase );
+ report.processMetadata( repositoryMetadata, repository );
}
}
}
/**
* The default report set, for repository health.
*
- * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" role-hint="health"
+ * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup"
+ * role-hint="health"
* @todo could these report groups be assembled dynamically by configuration rather than as explicit components? eg, reportGroup.addReport( ARP ), reportGroup.addReport( MRP )
*/
public class DefaultReportGroup
{
return "Repository Health";
}
-
- public String getFilename()
- {
- return "health-report.xml";
- }
}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
*
* @param artifact the artifact to process
* @param model the POM associated with the artifact to process
- * @param reportingDatabase the report database to store results in
*/
- void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase );
+ void processArtifact( Artifact artifact, Model model );
/**
* Run any metadata related reports in the report set.
*
* @param repositoryMetadata the metadata to process
* @param repository the repository the metadata is located in
- * @param reportingDatabase the report database to store results in
*/
- void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
- ReportingDatabase reportingDatabase );
+ void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository );
/**
* Whether a report with the given role hint is included in this report set.
* @return the report name
*/
String getName();
-
- /**
- * Get the filename of the reports within the repository's reports directory.
- *
- * @return the filename
- */
- String getFilename();
}
--- /dev/null
+package org.apache.maven.archiva.reporting.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.Serializable;
+
+/**
+ * ArtifactResultsKey - used by jpox for application identity for the {@link ArtifactResults} object and table.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArtifactResultsKey
+ implements Serializable
+{
+ public String groupId = "";
+
+ public String artifactId = "";
+
+ public String version = "";
+
+ public String type = "";
+
+ public String classifier = "";
+
+ public ArtifactResultsKey()
+ {
+ /* do nothing */
+ }
+
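+    /**
+     * Parse a key of the form "groupId:artifactId:version:type:classifier".
+     * splitPreserveAllTokens keeps empty tokens, so blank fields (such as a
+     * missing classifier) survive a toString()/parse round trip; exactly
+     * five tokens are assumed to be present.
+     */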
+ public ArtifactResultsKey( String key )
+ {
+        String[] parts = StringUtils.splitPreserveAllTokens( key, ':' );
+ groupId = parts[0];
+ artifactId = parts[1];
+ version = parts[2];
+ type = parts[3];
+ classifier = parts[4];
+ }
+
+ public String toString()
+ {
+ return StringUtils.join( new String[] { groupId, artifactId, version, type, classifier }, ':' );
+ }
+
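+    /**
+     * hashCode() and equals() must agree on every primary-key field for
+     * JPOX application identity to behave correctly.
+     */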
+ public int hashCode()
+ {
+ final int PRIME = 31;
+ int result = 1;
+ result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() );
+ result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() );
+ result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
+ result = PRIME * result + ( ( type == null ) ? 0 : type.hashCode() );
+ result = PRIME * result + ( ( classifier == null ) ? 0 : classifier.hashCode() );
+ return result;
+ }
+
+ public boolean equals( Object obj )
+ {
+ if ( this == obj )
+ {
+ return true;
+ }
+
+ if ( obj == null )
+ {
+ return false;
+ }
+
+ if ( getClass() != obj.getClass() )
+ {
+ return false;
+ }
+
+ final ArtifactResultsKey other = (ArtifactResultsKey) obj;
+
+ if ( groupId == null )
+ {
+ if ( other.groupId != null )
+ {
+ return false;
+ }
+ }
+ else if ( !groupId.equals( other.groupId ) )
+ {
+ return false;
+ }
+
+ if ( artifactId == null )
+ {
+ if ( other.artifactId != null )
+ {
+ return false;
+ }
+ }
+ else if ( !artifactId.equals( other.artifactId ) )
+ {
+ return false;
+ }
+
+ if ( version == null )
+ {
+ if ( other.version != null )
+ {
+ return false;
+ }
+ }
+ else if ( !version.equals( other.version ) )
+ {
+ return false;
+ }
+
+ if ( type == null )
+ {
+ if ( other.type != null )
+ {
+ return false;
+ }
+ }
+ else if ( !type.equals( other.type ) )
+ {
+ return false;
+ }
+
+ if ( classifier == null )
+ {
+ if ( other.classifier != null )
+ {
+ return false;
+ }
+ }
+ else if ( !classifier.equals( other.classifier ) )
+ {
+ return false;
+ }
+
+ return true;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.Serializable;
+
+/**
+ * MetadataResultsKey - used by jpox for application identity for the {@link MetadataResults} object and table.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MetadataResultsKey
+ implements Serializable
+{
+ public String groupId = "";
+
+ public String artifactId = "";
+
+ public String version = "";
+
+ public MetadataResultsKey()
+ {
+ /* do nothing */
+ }
+
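+    /**
+     * Parse a key of the form "groupId:artifactId:version". As with
+     * ArtifactResultsKey, empty tokens are preserved and exactly three
+     * tokens are assumed to be present.
+     */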
+ public MetadataResultsKey( String key )
+ {
+        String[] parts = StringUtils.splitPreserveAllTokens( key, ':' );
+ groupId = parts[0];
+ artifactId = parts[1];
+ version = parts[2];
+ }
+
+ public String toString()
+ {
+ return StringUtils.join( new String[] { groupId, artifactId, version }, ':' );
+ }
+
+ public int hashCode()
+ {
+ final int PRIME = 31;
+ int result = 1;
+ result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() );
+ result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() );
+ result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
+ return result;
+ }
+
+ public boolean equals( Object obj )
+ {
+ if ( this == obj )
+ {
+ return true;
+ }
+
+ if ( obj == null )
+ {
+ return false;
+ }
+
+ if ( getClass() != obj.getClass() )
+ {
+ return false;
+ }
+
+        final MetadataResultsKey other = (MetadataResultsKey) obj;
+
+ if ( groupId == null )
+ {
+ if ( other.groupId != null )
+ {
+ return false;
+ }
+ }
+ else if ( !groupId.equals( other.groupId ) )
+ {
+ return false;
+ }
+
+ if ( artifactId == null )
+ {
+ if ( other.artifactId != null )
+ {
+ return false;
+ }
+ }
+ else if ( !artifactId.equals( other.artifactId ) )
+ {
+ return false;
+ }
+
+ if ( version == null )
+ {
+ if ( other.version != null )
+ {
+ return false;
+ }
+ }
+ else if ( !version.equals( other.version ) )
+ {
+ return false;
+ }
+
+ return true;
+ }
+}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.model.Model;
{
String ROLE = ArtifactReportProcessor.class.getName();
- void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter );
+ void processArtifact( Artifact artifact, Model model );
}
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.layer.RepositoryQueryLayer;
import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
*/
private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResultsDatabase database;
+
private static final String ROLE_HINT = "bad-metadata";
/**
* @param repository the repository where the metadata was encountered
- * @param reporter the ReportingDatabase to receive processing results
*/
- public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
{
if ( metadata.storedInGroupDirectory() )
{
try
{
- checkPluginMetadata( metadata, repository, reporter );
+ checkPluginMetadata( metadata, repository );
}
catch ( IOException e )
{
- addWarning( reporter, metadata, null, "Error getting plugin artifact directories versions: " + e );
+ addWarning( metadata, null, "Error getting plugin artifact directories versions: " + e );
}
}
else
}
if ( !found )
{
- addFailure( reporter, metadata, "missing-last-updated",
- "Missing lastUpdated element inside the metadata." );
+ addFailure( metadata, "missing-last-updated", "Missing lastUpdated element inside the metadata." );
}
if ( metadata.storedInArtifactVersionDirectory() )
{
- checkSnapshotMetadata( metadata, repository, reporter );
+ checkSnapshotMetadata( metadata, repository );
}
else
{
- checkMetadataVersions( metadata, repository, reporter );
+ checkMetadataVersions( metadata, repository );
try
{
- checkRepositoryVersions( metadata, repository, reporter );
+ checkRepositoryVersions( metadata, repository );
}
catch ( IOException e )
{
String reason = "Error getting plugin artifact directories versions: " + e;
- addWarning( reporter, metadata, null, reason );
+ addWarning( metadata, null, reason );
}
}
}
}
- private static void addWarning( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
- String reason )
+ private void addWarning( RepositoryMetadata metadata, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addWarning( metadata, ROLE_HINT, problem, reason );
+ database.addWarning( metadata, ROLE_HINT, problem, reason );
}
/**
* @param repository the repository where the metadata was encountered
- * @param reporter the ReportingDatabase to receive processing results
*/
- private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
throws IOException
{
- File metadataDir =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+ File metadataDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) )
+ .getParentFile();
List pluginDirs = getArtifactIdFiles( metadataDir );
Map prefixes = new HashMap();
String artifactId = plugin.getArtifactId();
if ( artifactId == null || artifactId.length() == 0 )
{
- addFailure( reporter, metadata, "missing-artifact-id:" + plugin.getPrefix(),
+ addFailure( metadata, "missing-artifact-id:" + plugin.getPrefix(),
"Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
}
String prefix = plugin.getPrefix();
if ( prefix == null || prefix.length() == 0 )
{
- addFailure( reporter, metadata, "missing-plugin-prefix:" + artifactId,
+ addFailure( metadata, "missing-plugin-prefix:" + artifactId,
"Missing or empty plugin prefix for artifactId " + artifactId + "." );
}
else
{
if ( prefixes.containsKey( prefix ) )
{
- addFailure( reporter, metadata, "duplicate-plugin-prefix:" + prefix,
- "Duplicate plugin prefix found: " + prefix + "." );
+ addFailure( metadata, "duplicate-plugin-prefix:" + prefix, "Duplicate plugin prefix found: "
+ + prefix + "." );
}
else
{
File pluginDir = new File( metadataDir, artifactId );
if ( !pluginDirs.contains( pluginDir ) )
{
- addFailure( reporter, metadata, "missing-plugin-from-repository:" + artifactId,
- "Metadata plugin " + artifactId + " not found in the repository" );
+ addFailure( metadata, "missing-plugin-from-repository:" + artifactId, "Metadata plugin "
+ + artifactId + " not found in the repository" );
}
else
{
for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
{
File plugin = (File) plugins.next();
- addFailure( reporter, metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " +
- plugin.getName() + " is present in the repository but " + "missing in the metadata." );
+ addFailure( metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " + plugin.getName()
+ + " is present in the repository but " + "missing in the metadata." );
}
}
}
* @param repository the repository where the metadata was encountered
- * @param reporter the ReportingDatabase to receive processing results
*/
- private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
{
- RepositoryQueryLayer repositoryQueryLayer =
- repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+ RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
Versioning versioning = metadata.getMetadata().getVersioning();
if ( versioning != null )
{
Snapshot snapshot = versioning.getSnapshot();
- String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION,
- snapshot.getTimestamp() + "-" + snapshot.getBuildNumber() );
- Artifact artifact =
- artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+            String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION,
+                snapshot.getTimestamp() + "-" + snapshot.getBuildNumber() );
+ Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(),
+ version );
artifact.isSnapshot(); // trigger baseVersion correction
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- addFailure( reporter, metadata, "missing-snapshot-artifact-from-repository:" + version,
- "Snapshot artifact " + version + " does not exist." );
+ addFailure( metadata, "missing-snapshot-artifact-from-repository:" + version, "Snapshot artifact "
+ + version + " does not exist." );
}
}
}
* @param repository the repository where the metadata was encountered
- * @param reporter the ReportingDatabase to receive processing results
*/
- private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository )
{
- RepositoryQueryLayer repositoryQueryLayer =
- repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+ RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
Versioning versioning = metadata.getMetadata().getVersioning();
if ( versioning != null )
{
String version = (String) versions.next();
- Artifact artifact =
- artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+                Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(),
+                    metadata.getArtifactId(), version );
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- addFailure( reporter, metadata, "missing-artifact-from-repository:" + version, "Artifact version " +
- version + " is present in metadata but " + "missing in the repository." );
+ addFailure( metadata, "missing-artifact-from-repository:" + version, "Artifact version " + version
+ + " is present in metadata but " + "missing in the repository." );
}
}
}
- * @param reporter the ReportingDatabase to receive processing results
* @throws java.io.IOException if there is a problem reading from the file system
*/
- private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository )
throws IOException
{
Versioning versioning = metadata.getMetadata().getVersioning();
List metadataVersions = versioning != null ? versioning.getVersions() : Collections.EMPTY_LIST;
- File versionsDir =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+ File versionsDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) )
+ .getParentFile();
// TODO: I don't know how this condition can happen, but it was seen on the main repository.
// Avoid hard failure
String version = path.getParentFile().getName();
if ( !metadataVersions.contains( version ) )
{
- addFailure( reporter, metadata, "missing-artifact-from-metadata:" + version, "Artifact version " +
- version + " found in the repository but " + "missing in the metadata." );
+ addFailure( metadata, "missing-artifact-from-metadata:" + version, "Artifact version " + version
+ + " found in the repository but " + "missing in the metadata." );
}
}
}
else
{
- addFailure( reporter, metadata, null, "Metadata's directory did not exist: " + versionsDir );
+ addFailure( metadata, null, "Metadata's directory did not exist: " + versionsDir );
}
}
return artifactIdFiles;
}
- private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
- String reason )
+ private void addFailure( RepositoryMetadata metadata, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+ database.addFailure( metadata, ROLE_HINT, problem, reason );
}
}
*/
import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
* @plexus.requirement role-hint="md5"
*/
private Digester md5Digester;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
private static final String ROLE_HINT = "checksum";
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
// TODO: make md5 configurable
// verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
- verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
+ verifyChecksum( repository, path + ".sha1", file, sha1Digester, artifact );
}
private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
- ReportingDatabase reporter, Artifact artifact )
+ Artifact artifact )
{
File checksumFile = new File( repository.getBasedir(), path );
if ( checksumFile.exists() )
}
catch ( DigesterException e )
{
- addFailure( reporter, artifact, "checksum-wrong", e.getMessage() );
+ addFailure( artifact, "checksum-wrong", e.getMessage() );
}
catch ( IOException e )
{
- addFailure( reporter, artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
+ addFailure( artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
}
}
else
{
- addFailure( reporter, artifact, "checksum-missing",
+ addFailure( artifact, "checksum-missing",
digester.getAlgorithm() + " checksum file does not exist." );
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
}
*/
import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
import org.codehaus.plexus.digest.Digester;
*/
private Digester md5Digester;
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResultsDatabase database;
+
private static final String ROLE_HINT = "checksum-metadata";
/**
* Validate the checksums of the metadata. Get the metadata file from the
* repository then validate the checksum.
*/
- public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
{
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+ + "'. Only file based repositories are supported" );
}
//check if checksum files exist
String path = repository.pathOfRemoteRepositoryMetadata( metadata );
File file = new File( repository.getBasedir(), path );
- verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
- verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );
+ verifyChecksum( repository, path + ".md5", file, md5Digester, metadata );
+ verifyChecksum( repository, path + ".sha1", file, sha1Digester, metadata );
}
private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
- ReportingDatabase reporter, RepositoryMetadata metadata )
+ RepositoryMetadata metadata )
{
File checksumFile = new File( repository.getBasedir(), path );
if ( checksumFile.exists() )
}
catch ( DigesterException e )
{
- addFailure( reporter, metadata, "checksum-wrong", e.getMessage() );
+ addFailure( metadata, "checksum-wrong", e.getMessage() );
}
catch ( IOException e )
{
- addFailure( reporter, metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
+ addFailure( metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
}
}
else
{
- addFailure( reporter, metadata, "checksum-missing",
- digester.getAlgorithm() + " checksum file does not exist." );
+ addFailure( metadata, "checksum-missing", digester.getAlgorithm() + " checksum file does not exist." );
}
}
- private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
- String reason )
+ private void addFailure( RepositoryMetadata metadata, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+ database.addFailure( metadata, ROLE_HINT, problem, reason );
}
}
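Every processor in this patch gets the same mechanical treatment: the ReportingDatabase parameter is dropped from the public methods, a results database is injected as a Plexus requirement instead, and the addFailure/addWarning/addNotice helpers become instance methods that write to it. A minimal sketch of the resulting shape (the class name and role hint are hypothetical; the field and helper follow the pattern in the hunks above):

import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;

// Sketch only: mirrors the post-patch processor shape, not a file from this patch.
public class HypotheticalChecksumProcessor
{
    /**
     * @plexus.requirement
     */
    private MetadataResultsDatabase database; // injected by Plexus, no longer passed per call

    private static final String ROLE_HINT = "hypothetical";

    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
    {
        // ... checksum verification as above ...
        addFailure( metadata, "checksum-missing", "checksum file does not exist." );
    }

    // Now an instance method, since it needs the injected database.
    private void addFailure( RepositoryMetadata metadata, String problem, String reason )
    {
        database.addFailure( metadata, ROLE_HINT, problem, reason );
    }
}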
import org.apache.maven.archiva.layer.RepositoryQueryLayer;
import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
*/
private RepositoryQueryLayerFactory layerFactory;
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
private static final String POM = "pom";
private static final String ROLE_HINT = "dependency";
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( artifact.getRepository() );
if ( !queryLayer.containsArtifact( artifact ) )
{
// TODO: is this even possible?
- addFailure( reporter, artifact, "missing-artifact", "Artifact does not exist in the repository" );
+ addFailure( artifact, "missing-artifact", "Artifact does not exist in the repository" );
}
if ( model != null && POM.equals( artifact.getType() ) )
{
List dependencies = model.getDependencies();
- processDependencies( dependencies, reporter, queryLayer, artifact );
+ processDependencies( dependencies, queryLayer, artifact );
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
- private void processDependencies( List dependencies, ReportingDatabase reporter,
- RepositoryQueryLayer repositoryQueryLayer, Artifact sourceArtifact )
+ private void processDependencies( List dependencies, RepositoryQueryLayer repositoryQueryLayer,
+ Artifact sourceArtifact )
{
if ( dependencies.size() > 0 )
{
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- String reason = MessageFormat.format(
- "Artifact''s dependency {0} does not exist in the repository",
- new String[]{getDependencyString( dependency )} );
- addFailure( reporter, sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ),
- reason );
+ String reason = MessageFormat
+ .format( "Artifact''s dependency {0} does not exist in the repository",
+ new String[] { getDependencyString( dependency ) } );
+ addFailure( sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ), reason );
}
}
catch ( InvalidVersionSpecificationException e )
{
String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}",
- new String[]{getDependencyString( dependency ),
- dependency.getVersion()} );
- addFailure( reporter, sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
+ new String[] {
+ getDependencyString( dependency ),
+ dependency.getVersion() } );
+ addFailure( sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
}
}
}
}
return artifactFactory.createDependencyArtifact( dependency.getGroupId(), dependency.getArtifactId(), spec,
- dependency.getType(), dependency.getClassifier(),
- dependency.getScope() );
+ dependency.getType(), dependency.getClassifier(), dependency
+ .getScope() );
}
}
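A side note on the reflowed MessageFormat calls above: the doubled apostrophe in "Artifact''s" is intentional. java.text.MessageFormat treats a single quote as the start of a quoted section, so a literal apostrophe in a pattern must be written as two. A self-contained demonstration:

import java.text.MessageFormat;

public class MessageFormatQuoteDemo
{
    public static void main( String[] args )
    {
        // Two apostrophes in the pattern yield one literal apostrophe in the output.
        String reason = MessageFormat.format( "Artifact''s dependency {0} does not exist in the repository",
                                              new String[] { "groupId:artifactId:1.0" } );

        // Prints: Artifact's dependency groupId:artifactId:1.0 does not exist in the repository
        System.out.println( reason );
    }
}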
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
*/
private String indexDirectory;
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
private static final String ROLE_HINT = "duplicate";
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( artifact.getFile() != null )
}
catch ( DigesterException e )
{
- addWarning( reporter, artifact, null,
- "Unable to generate checksum for " + artifact.getFile() + ": " + e );
+ addWarning( artifact, null, "Unable to generate checksum for " + artifact.getFile() + ": " + e );
}
if ( checksum != null )
{
try
{
- List results = index.search( new LuceneQuery(
- new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
+ List results = index
+ .search( new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum
+ .toLowerCase() ) ) ) );
if ( !results.isEmpty() )
{
String groupId = artifact.getGroupId();
if ( groupId.equals( result.getGroupId() ) )
{
- addFailure( reporter, artifact, "duplicate",
- "Found duplicate for " + artifact.getId() );
+ addFailure( artifact, "duplicate", "Found duplicate for " + artifact.getId() );
}
}
}
}
catch ( RepositoryIndexSearchException e )
{
- addWarning( reporter, artifact, null, "Failed to search in index" + e );
+ addWarning( artifact, null, "Failed to search in index" + e );
}
}
}
else
{
- addWarning( reporter, artifact, null, "Artifact file is null" );
+ addWarning( artifact, null, "Artifact file is null" );
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
- private static void addWarning( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addWarning( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addWarning( artifact, ROLE_HINT, problem, reason );
+ database.addWarning( artifact, ROLE_HINT, problem, reason );
}
}
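The duplicate check above reduces to a single Lucene term query against the MD5 field of the artifact index. Unwound from the reflowed call chain, the lookup looks like this; a sketch only, and the RepositoryArtifactIndex type for the index is an assumption based on this module's indexer API (the search call and exception are the ones visible in the hunk):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;

import java.util.List;

public class DuplicateLookupSketch
{
    public List findRecordsByMd5( RepositoryArtifactIndex index, String checksum )
        throws RepositoryIndexSearchException
    {
        // The index stores lowercase hex digests, so normalise before querying.
        Term md5Term = new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() );

        // Each hit is a StandardArtifactIndexRecord; a hit with the same groupId
        // as the artifact under scrutiny is what the processor reports as a duplicate.
        return index.search( new LuceneQuery( new TermQuery( md5Term ) ) );
    }
}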
*/
import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
{
private static final String ROLE_HINT = "invalid-pom";
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
/**
* @param artifact The pom xml file to be validated, passed as an artifact object.
- * @param reporter The artifact reporter object.
*/
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+ + "'. Only file based repositories are supported" );
}
if ( "pom".equals( artifact.getType().toLowerCase() ) )
if ( !f.exists() )
{
- addFailure( reporter, artifact, "pom-missing", "POM not found." );
+ addFailure( artifact, "pom-missing", "POM not found." );
}
else
{
}
catch ( XmlPullParserException e )
{
- addFailure( reporter, artifact, "pom-parse-exception",
+ addFailure( artifact, "pom-parse-exception",
"The pom xml file is not well-formed. Error while parsing: " + e.getMessage() );
}
catch ( IOException e )
{
- addFailure( reporter, artifact, "pom-io-exception",
- "Error while reading the pom xml file: " + e.getMessage() );
+ addFailure( artifact, "pom-io-exception", "Error while reading the pom xml file: " + e.getMessage() );
}
finally
{
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
}
*/
import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.apache.maven.project.MavenProjectBuilder;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.File;
private ArtifactFactory artifactFactory;
// TODO: share with other code with the same
- private static final Set JAR_FILE_TYPES =
- new HashSet( Arrays.asList( new String[]{"jar", "war", "par", "ejb", "ear", "rar", "sar"} ) );
+ private static final Set JAR_FILE_TYPES = new HashSet( Arrays.asList( new String[] {
+ "jar",
+ "war",
+ "par",
+ "ejb",
+ "ear",
+ "rar",
+ "sar" } ) );
/**
* @plexus.requirement
*/
- private MavenProjectBuilder projectBuilder;
+ private ArtifactResultsDatabase database;
private static final String POM = "pom";
* location is valid based on the location specified in the pom. Check if the both the location
* specified in the file system pom and in the pom included in the package is the same.
*/
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+ + "'. Only file based repositories are supported" );
}
adjustDistributionArtifactHandler( artifact );
{
//check if the artifact is located in its proper location based on the info
//specified in the model object/pom
- Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(),
- model.getArtifactId(),
- model.getVersion(),
- artifact.getType(),
- artifact.getClassifier() );
+ Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(), model
+ .getArtifactId(), model.getVersion(), artifact.getType(), artifact.getClassifier() );
adjustDistributionArtifactHandler( modelArtifact );
String modelPath = repository.pathOf( modelArtifact );
if ( !modelPath.equals( artifactPath ) )
{
- addFailure( reporter, artifact, "repository-pom-location",
- "The artifact is out of place. It does not match the specified location in the repository pom: " +
- modelPath );
+ addFailure( artifact, "repository-pom-location",
+ "The artifact is out of place. It does not match the specified location in the repository pom: "
+ + modelPath );
}
}
}
{
//unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
//check if the pom is included in the package
- Model extractedModel = readArtifactModel( file, artifact, reporter );
+ Model extractedModel = readArtifactModel( file, artifact );
if ( extractedModel != null )
{
extractedModel.getPackaging() );
if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
{
- addFailure( reporter, artifact, "packaged-pom-location",
+ addFailure( artifact, "packaged-pom-location",
"The artifact is out of place. It does not match the specified location in the packaged pom." );
}
}
}
else
{
- addFailure( reporter, artifact, "missing-artifact",
- "The artifact file [" + file + "] cannot be found for metadata." );
+ addFailure( artifact, "missing-artifact", "The artifact file [" + file + "] cannot be found for metadata." );
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
private static void adjustDistributionArtifactHandler( Artifact artifact )
}
}
- private Model readArtifactModel( File file, Artifact artifact, ReportingDatabase reporter )
+ private Model readArtifactModel( File file, Artifact artifact )
{
Model model = null;
jar = new JarFile( file );
//Get the entry and its input stream.
- JarEntry entry = jar.getJarEntry(
- "META-INF/maven/" + artifact.getGroupId() + "/" + artifact.getArtifactId() + "/pom.xml" );
+ JarEntry entry = jar.getJarEntry( "META-INF/maven/" + artifact.getGroupId() + "/"
+ + artifact.getArtifactId() + "/pom.xml" );
// If the entry is not null, extract it.
if ( entry != null )
}
catch ( IOException e )
{
- addWarning( reporter, artifact, "Unable to read artifact to extract model: " + e );
+ addWarning( artifact, "Unable to read artifact to extract model: " + e );
}
catch ( XmlPullParserException e )
{
- addWarning( reporter, artifact, "Unable to parse extracted model: " + e );
+ addWarning( artifact, "Unable to parse extracted model: " + e );
}
finally
{
return model;
}
- private static void addWarning( ReportingDatabase reporter, Artifact artifact, String reason )
+ private void addWarning( Artifact artifact, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addWarning( artifact, ROLE_HINT, null, reason );
+ database.addWarning( artifact, ROLE_HINT, null, reason );
}
private Model readModel( InputStream entryStream )
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
{
String ROLE = MetadataReportProcessor.class.getName();
- void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ReportingDatabase reporter );
+ void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository );
}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
* @plexus.configuration default-value="31536000"
*/
private int maxAge;
+
+ /**
+ * TODO: Must create an 'Old Artifact' database.
+ * TODO: Base this off of an artifact table query instead.
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( System.currentTimeMillis() - file.lastModified() > maxAge * 1000 )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addNotice( artifact, ROLE_HINT, "old-artifact",
+ database.addNotice( artifact, ROLE_HINT, "old-artifact",
"The artifact is older than the maximum age of " + maxAge + " seconds." );
}
}
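One thing worth flagging in the age check above: maxAge is an int, so maxAge * 1000 is computed in int arithmetic before the comparison. With the configured default of 31536000 seconds (one year), the product overflows and wraps to roughly 17 days' worth of milliseconds, so artifacts would be flagged as old far too early. The same pattern appears in the snapshot-age check below. A suggested fix (not part of this patch) is to force long arithmetic with a long literal:

public class AgeCheckOverflowDemo
{
    public static void main( String[] args )
    {
        int maxAge = 31536000; // one year in seconds, the configured default

        // int arithmetic wraps: 31536000 * 1000 does not fit in 32 bits.
        System.out.println( maxAge * 1000 );  // prints 1471228928 (about 17 days in ms)

        // Widening one operand keeps the whole multiplication in long arithmetic.
        System.out.println( maxAge * 1000L ); // prints 31536000000
    }
}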
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
*/
private int maxSnapshots;
- public void processArtifact( final Artifact artifact, Model model, ReportingDatabase reporter )
+ /**
+ * TODO: Must create an 'Old Artifact' database.
+ * TODO: Base this off of an artifact table query instead.
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
+ public void processArtifact( final Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+ + "'. Only file based repositories are supported" );
}
adjustDistributionArtifactHandler( artifact );
catch ( ParseException e )
{
throw new IllegalStateException(
- "Shouldn't match timestamp pattern and not be able to parse it: " + m.group( 2 ) );
+ "Shouldn't match timestamp pattern and not be able to parse it: "
+ + m.group( 2 ) );
}
if ( System.currentTimeMillis() - timestamp > maxAge * 1000 )
{
- addNotice( reporter, artifact, "snapshot-expired-time",
- "The artifact is older than the maximum age of " + maxAge + " seconds." );
+ addNotice( artifact, "snapshot-expired-time", "The artifact is older than the maximum age of "
+ + maxAge + " seconds." );
}
else if ( maxSnapshots > 0 )
{
{
public boolean accept( File file, String string )
{
- return string.startsWith( artifact.getArtifactId() + "-" ) &&
- string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
+ return string.startsWith( artifact.getArtifactId() + "-" )
+ && string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
}
} );
- List/*<Integer>*/ buildNumbers = new ArrayList();
+ List/*<Integer>*/buildNumbers = new ArrayList();
Integer currentBuild = null;
for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
{
if ( buildNumbers.contains( currentBuild ) )
{
- addNotice( reporter, artifact, "snapshot-expired-count",
+ addNotice( artifact, "snapshot-expired-count",
"The artifact is older than the maximum number of retained snapshot builds." );
}
}
}
}
- private static void addNotice( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addNotice( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addNotice( artifact, ROLE_HINT, problem, reason );
+ database.addNotice( artifact, ROLE_HINT, problem, reason );
}
private static void adjustDistributionArtifactHandler( Artifact artifact )
+++ /dev/null
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Reader;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Writer;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Load and store the reports. No synchronization is used, but it is unnecessary as the old object
- * can continue to be used.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
- * @todo support other implementations than XML file
- * @plexus.component
- */
-public class DefaultReportingStore
- extends AbstractLogEnabled
- implements ReportingStore
-{
- /**
- * The cached reports for given repositories.
- */
- private Map/*<String,ReportingDatabase>*/ reports = new HashMap();
-
- public ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
- throws ReportingStoreException
- {
- String key = getKey( repository, reportGroup );
- ReportingDatabase database = (ReportingDatabase) reports.get( key );
-
- if ( database == null )
- {
- ReportingXpp3Reader reader = new ReportingXpp3Reader();
-
- File file = getReportFilename( repository, reportGroup );
-
- FileReader fileReader = null;
- try
- {
- fileReader = new FileReader( file );
- }
- catch ( FileNotFoundException e )
- {
- database = new ReportingDatabase( reportGroup, repository );
- }
-
- if ( database == null )
- {
- getLogger().info( "Reading report database from " + file );
- try
- {
- Reporting reporting = reader.read( fileReader, false );
- database = new ReportingDatabase( reportGroup, reporting, repository );
- }
- catch ( IOException e )
- {
- throw new ReportingStoreException( e.getMessage(), e );
- }
- catch ( XmlPullParserException e )
- {
- throw new ReportingStoreException( e.getMessage(), e );
- }
- finally
- {
- IOUtils.closeQuietly( fileReader );
- }
- }
-
- reports.put( key, database );
- }
- return database;
- }
-
- private static String getKey( ArtifactRepository repository, ReportGroup reportGroup )
- {
- return repository.getId() + "/" + reportGroup.getFilename();
- }
-
- private static File getReportFilename( ArtifactRepository repository, ReportGroup reportGroup )
- {
- return new File( repository.getBasedir(), ".reports/" + reportGroup.getFilename() );
- }
-
- public void storeReports( ReportingDatabase database, ArtifactRepository repository )
- throws ReportingStoreException
- {
- database.updateTimings();
-
- ReportingXpp3Writer writer = new ReportingXpp3Writer();
-
- File file = getReportFilename( repository, database.getReportGroup() );
- getLogger().info( "Writing reports to " + file );
- FileWriter fileWriter = null;
- try
- {
- file.getParentFile().mkdirs();
-
- fileWriter = new FileWriter( file );
- writer.write( fileWriter, database.getReporting() );
- }
- catch ( IOException e )
- {
- throw new ReportingStoreException( e.getMessage(), e );
- }
- finally
- {
- IOUtils.closeQuietly( fileWriter );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- * A component for loading the reporting database into the model.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo this is something that could possibly be generalised into Modello.
- */
-public interface ReportingStore
-{
- /**
- * The Plexus role for the component.
- */
- String ROLE = ReportingStore.class.getName();
-
- /**
- * Get the reports from the store. A cached version may be used.
- *
- * @param repository the repository to load the reports for
- * @param reportGroup the report group to get the report for
- * @return the reporting database
- * @throws ReportingStoreException if there was a problem reading the store
- */
- ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
- throws ReportingStoreException;
-
- /**
- * Save the reporting to the store.
- *
- * @param database the reports to store
- * @param repository the repositorry to store the reports in
- * @throws ReportingStoreException if there was a problem writing the store
- */
- void storeReports( ReportingDatabase database, ArtifactRepository repository )
- throws ReportingStoreException;
-
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Exception occurring using the reporting store.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ReportingStoreException
- extends Exception
-{
- public ReportingStoreException( String message )
- {
- super( message );
- }
-
- public ReportingStoreException( String message, Throwable e )
- {
- super( message, e );
- }
-}
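The three files above (DefaultReportingStore, ReportingStore, ReportingStoreException) are deleted outright: persistence moves from a hand-written XML file under .reports/ to the JPox/JDO mapping declared in the Modello model changes that follow, and consumers obtain the result databases as Plexus components instead of loading a ReportingDatabase from disk.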
+<?xml version="1.0" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
</default>
</defaults>
<classes>
- <class rootElement="true" xml.tagName="reporting">
+ <class rootElement="true" xml.tagName="reporting" stash.storable="false">
<name>Reporting</name>
<version>1.0.0</version>
<fields>
<multiplicity>*</multiplicity>
</association>
</field>
- <field xml.attribute="true">
- <name>lastModified</name>
- <version>1.0.0</version>
- <type>long</type>
- </field>
- <field xml.attribute="true">
- <name>executionTime</name>
- <version>1.0.0</version>
- <type>long</type>
- </field>
</fields>
</class>
- <class>
+ <class stash.storable="true"
+ jpox.use-identifiers-as-primary-key="true"
+ jpox.identity-type="application"
+ jpox.identity-class="ArtifactResultsKey">
<name>ArtifactResults</name>
<version>1.0.0</version>
<fields>
- <field>
- <name>failures</name>
- <version>1.0.0</version>
- <association>
- <type>Result</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>warnings</name>
- <version>1.0.0</version>
- <association>
- <type>Result</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>notices</name>
- <version>1.0.0</version>
- <association>
- <type>Result</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>groupId</name>
<identity>true</identity>
<version>1.0.0</version>
The group ID of the artifact in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>artifactId</name>
<version>1.0.0</version>
<identity>true</identity>
The artifact ID of the artifact in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>version</name>
<version>1.0.0</version>
<identity>true</identity>
The version of the artifact in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>type</name>
<version>1.0.0</version>
<type>String</type>
The type of the artifact in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.nullValue="none"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>classifier</name>
<version>1.0.0</version>
<type>String</type>
The classifier of the artifact in the result.
</description>
</field>
- </fields>
- </class>
- <class>
- <name>MetadataResults</name>
- <version>1.0.0</version>
- <fields>
<field>
<name>failures</name>
<version>1.0.0</version>
- <association>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
<type>Result</type>
<multiplicity>*</multiplicity>
</association>
<field>
<name>warnings</name>
<version>1.0.0</version>
- <association>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
<type>Result</type>
<multiplicity>*</multiplicity>
</association>
<field>
<name>notices</name>
<version>1.0.0</version>
- <association>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
<type>Result</type>
<multiplicity>*</multiplicity>
</association>
</field>
- <field xml.attribute="true">
+ </fields>
+ </class>
+ <class stash.storable="true"
+ jpox.use-identifiers-as-primary-key="true"
+ jpox.identity-type="application"
+ jpox.identity-class="MetadataResultsKey">
+ <name>MetadataResults</name>
+ <version>1.0.0</version>
+ <fields>
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>groupId</name>
<version>1.0.0</version>
<type>String</type>
The group ID of the metadata in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.nullValue="none"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>artifactId</name>
<version>1.0.0</version>
<type>String</type>
The artifact ID of the metadata in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.nullValue="none"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>version</name>
<version>1.0.0</version>
<type>String</type>
The version of the metadata in the result.
</description>
</field>
+ <field>
+ <name>failures</name>
+ <version>1.0.0</version>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field>
+ <name>warnings</name>
+ <version>1.0.0</version>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field>
+ <name>notices</name>
+ <version>1.0.0</version>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
<field xml.attribute="true">
<name>lastModified</name>
<version>1.0.0</version>
</field>
</fields>
</class>
- <class>
+ <class stash.storable="true">
<name>Result</name>
<version>1.0.0</version>
<fields>
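The jpox.identity-class attributes above switch the generated classes to JDO application identity with a composite primary key, one key field per field marked jpox.primary-key="true". The key class itself is generated by the Modello/JPox tooling; its rough shape for ArtifactResults, sketched here under the usual JDO key-class conventions (no-arg constructor, single-String round-tripping, equals/hashCode), would be something like:

import java.io.Serializable;

// Illustrative only: the real ArtifactResultsKey is generated from the model.
// One public field per primary-key field declared above.
public class ArtifactResultsKey
    implements Serializable
{
    public String groupId = "";

    public String artifactId = "";

    public String version = "";

    public String type = "";

    public String classifier = "";

    public ArtifactResultsKey()
    {
        // JDO requires a no-arg constructor.
    }

    public ArtifactResultsKey( String key )
    {
        // JDO application-identity keys round-trip through a single String form.
        String[] parts = key.split( ":" );
        groupId = parts[0];
        artifactId = parts[1];
        version = parts[2];
        type = parts[3];
        classifier = parts[4];
    }

    public String toString()
    {
        return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
    }

    public int hashCode()
    {
        return toString().hashCode();
    }

    public boolean equals( Object obj )
    {
        return ( obj instanceof ArtifactResultsKey ) && toString().equals( obj.toString() );
    }
}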
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.jpox.SchemaTool;
import java.io.File;
+import java.net.URL;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
/**
*
{
super.setUp();
+ setupJdoFactory();
+
File repositoryDirectory = getTestFile( "src/test/repository" );
factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
}
- protected Artifact createArtifactFromRepository( File repository, String groupId, String artifactId,
- String version )
+ protected void setupJdoFactory()
+ throws Exception
+ {
+ DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
+
+ jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" ); //$NON-NLS-1$
+
+ jdoFactory.setDriverName( "org.hsqldb.jdbcDriver" ); //$NON-NLS-1$
+
+ jdoFactory.setUrl( "jdbc:hsqldb:mem:" + getName() ); //$NON-NLS-1$
+
+ jdoFactory.setUserName( "sa" ); //$NON-NLS-1$
+
+ jdoFactory.setPassword( "" ); //$NON-NLS-1$
+
+ jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_UNCOMMITTED" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+ jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_UNCOMMITTED" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+ jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+ jdoFactory.setProperty( "javax.jdo.PersistenceManagerFactoryClass", "org.jpox.PersistenceManagerFactoryImpl" );
+
+ Properties properties = jdoFactory.getProperties();
+
+ for ( Iterator it = properties.entrySet().iterator(); it.hasNext(); )
+ {
+ Map.Entry entry = (Map.Entry) it.next();
+
+ System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
+ }
+
+ SchemaTool.createSchemaTables( new URL[] { getClass()
+ .getResource( "/org/apache/maven/archiva/reporting/model/package.jdo" ) }, new URL[] {}, null, false, null ); //$NON-NLS-1$
+
+ PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
+
+ assertNotNull( pmf );
+
+ PersistenceManager pm = pmf.getPersistenceManager();
+
+ pm.close();
+ }
+
+ protected Artifact createArtifactFromRepository( File repository, String groupId, String artifactId, String version )
throws Exception
{
Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, "jar" );
- artifact.setRepository(
- factory.createArtifactRepository( "repository", repository.toURL().toString(), layout, null, null ) );
+ artifact.setRepository( factory.createArtifactRepository( "repository", repository.toURL().toString(), layout,
+ null, null ) );
artifact.isSnapshot();
protected Artifact createArtifactWithClassifier( String groupId, String artifactId, String version, String type,
String classifier )
{
- Artifact artifact =
- artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
+ Artifact artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifier );
artifact.setRepository( repository );
return artifact;
}
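Two details in the test scaffolding above do the heavy lifting: the JDBC URL is derived from getName(), so each test method gets its own throwaway in-memory HSQLDB (jdbc:hsqldb:mem:testFoo never shares state with jdbc:hsqldb:mem:testBar), and the combination of org.jpox.autoCreateSchema with the explicit SchemaTool.createSchemaTables call ensures the tables mapped by package.jdo exist before the first component lookup touches them.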
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * ArtifactResultsDatabaseTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArtifactResultsDatabaseTest
+ extends AbstractRepositoryReportsTestCase
+{
+ private Artifact artifact;
+ private String processor, problem, reason;
+
+ private ArtifactResultsDatabase database;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+
+ artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope", "type",
+ "classifier", null );
+ processor = "processor";
+ problem = "problem";
+ reason = "reason";
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ release( database );
+
+ super.tearDown();
+ }
+
+ public void testAddNoticeArtifactStringStringString()
+ {
+ database.addNotice( artifact, processor, problem, reason );
+ ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+ assertEquals( 1, database.getNumNotices() );
+ assertEquals( 1, artifactResults.getNotices().size() );
+
+ database.addNotice( artifact, processor, problem, reason );
+ artifactResults = database.getArtifactResults( artifact );
+
+ assertEquals( 1, database.getNumNotices() );
+ assertEquals( 1, artifactResults.getNotices().size() );
+ }
+
+ public void testAddWarningArtifactStringStringString()
+ {
+ database.addWarning( artifact, processor, problem, reason );
+ ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+ assertEquals( 1, database.getNumWarnings() );
+ assertEquals( 1, artifactResults.getWarnings().size() );
+
+ database.addWarning( artifact, processor, problem, reason );
+ artifactResults = database.getArtifactResults( artifact );
+
+ assertEquals( 1, database.getNumWarnings() );
+ assertEquals( 1, artifactResults.getWarnings().size() );
+ }
+
+ public void testAddFailureArtifactStringStringString()
+ {
+ database.addFailure( artifact, processor, problem, reason );
+ ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 1, artifactResults.getFailures().size() );
+
+ database.addFailure( artifact, processor, problem, reason );
+ artifactResults = database.getArtifactResults( artifact );
+
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 1, artifactResults.getFailures().size() );
+ }
+}
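Note the assertion pattern in each test: the same result is added twice, yet both the database-wide count and the per-artifact list are still expected to be 1, pinning down that the JDO-backed databases collapse identical results rather than appending duplicates.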
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * MetadataResultsDatabaseTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MetadataResultsDatabaseTest
+ extends AbstractRepositoryReportsTestCase
+{
+ private MetadataResultsDatabase database;
+
+ private RepositoryMetadata metadata;
+
+ private String processor, problem, reason;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
+
+ Artifact artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope",
+ "type", "classifier", null );
+ metadata = new ArtifactRepositoryMetadata( artifact );
+
+ processor = "processor";
+ problem = "problem";
+ reason = "reason";
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ release( database );
+
+ super.tearDown();
+ }
+
+ public void testAddNoticeRepositoryMetadataStringStringString()
+ {
+ database.addNotice( metadata, processor, problem, reason );
+ MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+ assertEquals( 1, database.getNumNotices() );
+ assertEquals( 1, metadataResults.getNotices().size() );
+
+ database.addNotice( metadata, processor, problem, reason );
+ metadataResults = database.getMetadataResults( metadata );
+
+ assertEquals( 1, database.getNumNotices() );
+ assertEquals( 1, metadataResults.getNotices().size() );
+ }
+
+ public void testAddWarningRepositoryMetadataStringStringString()
+ {
+ database.addWarning( metadata, processor, problem, reason );
+ MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+ assertEquals( 1, database.getNumWarnings() );
+ assertEquals( 1, metadataResults.getWarnings().size() );
+
+ database.addWarning( metadata, processor, problem, reason );
+ metadataResults = database.getMetadataResults( metadata );
+
+ assertEquals( 1, database.getNumWarnings() );
+ assertEquals( 1, metadataResults.getWarnings().size() );
+ }
+
+ public void testAddFailureRepositoryMetadataStringStringString()
+ {
+ database.addFailure( metadata, processor, problem, reason );
+ MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 1, metadataResults.getFailures().size() );
+
+ database.addFailure( metadata, processor, problem, reason );
+ metadataResults = database.getMetadataResults( metadata );
+
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 1, metadataResults.getFailures().size() );
+ }
+}
* under the License.
*/
-import junit.framework.TestCase;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
/**
* Test for {@link ReportingDatabase}.
* @version $Id$
*/
public class ReportingDatabaseTest
- extends TestCase
+ extends AbstractRepositoryReportsTestCase
{
- private Artifact artifact;
-
- private String processor, problem, reason;
-
- private ReportingDatabase reportingDatabase;
-
- private RepositoryMetadata metadata;
+ private ReportingDatabase database;
protected void setUp()
throws Exception
{
super.setUp();
- artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope", "type",
- "classifier", null );
- processor = "processor";
- problem = "problem";
- reason = "reason";
- reportingDatabase = new ReportingDatabase( null );
-
- metadata = new ArtifactRepositoryMetadata( artifact );
- }
-
- public void testAddNoticeArtifactStringStringString()
- {
- reportingDatabase.addNotice( artifact, processor, problem, reason );
- ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumNotices() );
- assertEquals( 1, artifactResults.getNotices().size() );
-
- reportingDatabase.addNotice( artifact, processor, problem, reason );
- artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumNotices() );
- assertEquals( 1, artifactResults.getNotices().size() );
+ database = (ReportingDatabase) lookup( ReportingDatabase.ROLE );
}
- public void testAddWarningArtifactStringStringString()
- {
- reportingDatabase.addWarning( artifact, processor, problem, reason );
- ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- assertEquals( 1, artifactResults.getWarnings().size() );
-
- reportingDatabase.addWarning( artifact, processor, problem, reason );
- artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- assertEquals( 1, artifactResults.getWarnings().size() );
- }
-
- public void testAddFailureArtifactStringStringString()
- {
- reportingDatabase.addFailure( artifact, processor, problem, reason );
- ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 1, artifactResults.getFailures().size() );
-
- reportingDatabase.addFailure( artifact, processor, problem, reason );
- artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 1, artifactResults.getFailures().size() );
- }
-
- public void testAddNoticeRepositoryMetadataStringStringString()
- {
- reportingDatabase.addNotice( metadata, processor, problem, reason );
- MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumNotices() );
- assertEquals( 1, metadataResults.getNotices().size() );
-
- reportingDatabase.addNotice( metadata, processor, problem, reason );
- metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumNotices() );
- assertEquals( 1, metadataResults.getNotices().size() );
- }
-
- public void testAddWarningRepositoryMetadataStringStringString()
+ protected void tearDown()
+ throws Exception
{
- reportingDatabase.addWarning( metadata, processor, problem, reason );
- MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- assertEquals( 1, metadataResults.getWarnings().size() );
-
- reportingDatabase.addWarning( metadata, processor, problem, reason );
- metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- assertEquals( 1, metadataResults.getWarnings().size() );
+ release( database );
+ super.tearDown();
}
- public void testAddFailureRepositoryMetadataStringStringString()
+ public void testLookup()
{
- reportingDatabase.addFailure( metadata, processor, problem, reason );
- MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 1, metadataResults.getFailures().size() );
-
- reportingDatabase.addFailure( metadata, processor, problem, reason );
- metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 1, metadataResults.getFailures().size() );
+ assertNotNull( "database should not be null.", database );
+ assertNotNull( "database.artifactDatabase should not be null.", database.getArtifactDatabase() );
+ assertNotNull( "database.metadataDatabase should not be null.", database.getMetadataDatabase() );
}
}
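The rewritten test no longer constructs a ReportingDatabase by hand; it looks one up as a Plexus component and only checks that the two underlying databases are wired in. The shape this implies is a thin facade, sketched here (only the two accessors are asserted above; everything else about the class is assumption):

import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;

// Sketch of the facade implied by testLookup(); not a file from this patch.
public class ReportingDatabaseSketch
{
    /**
     * @plexus.requirement
     */
    private ArtifactResultsDatabase artifactDatabase;

    /**
     * @plexus.requirement
     */
    private MetadataResultsDatabase metadataDatabase;

    public ArtifactResultsDatabase getArtifactDatabase()
    {
        return artifactDatabase;
    }

    public MetadataResultsDatabase getMetadataDatabase()
    {
        return metadataDatabase;
    }
}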
* under the License.
*/
+import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
import org.apache.maven.archiva.reporting.model.MetadataResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import java.util.Iterator;
+/**
+ * BadMetadataReportProcessorTest
+ *
+ * @version $Id$
+ */
public class BadMetadataReportProcessorTest
extends AbstractRepositoryReportsTestCase
{
private MetadataReportProcessor badMetadataReportProcessor;
- private ReportingDatabase reportingDatabase;
+ private MetadataResultsDatabase database;
protected void setUp()
throws Exception
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
- badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
+ database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
+ badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
}
+ protected void tearDown()
+ throws Exception
+ {
+ release( artifactFactory );
+ release( badMetadataReportProcessor );
+ super.tearDown();
+ }
+
public void testMetadataMissingLastUpdated()
{
Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, null );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
if ( alpha1First )
{
assertEquals( "check reason",
- "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.", result
+ .getReason() );
}
else
{
assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+ .getReason() );
}
result = (Result) failures.next();
if ( !alpha1First )
{
assertEquals( "check reason",
- "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.", result
+ .getReason() );
}
else
{
assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+ .getReason() );
}
assertFalse( "check no more failures", failures.hasNext() );
}
public void testSnapshotMetadataMissingVersioning()
{
- Artifact artifact =
- artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+ "1.0-alpha-1-SNAPSHOT", "type" );
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
- "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
- result.getReason() );
+ "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
- "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
- result.getReason() );
+ "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.", result
+ .getReason() );
assertTrue( "check there is a 2nd failure", failures.hasNext() );
result = (Result) failures.next();
// TODO: should be more robust
assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "missing-plugin", "default3" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository",
- result.getReason() );
+ assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository", result.getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason",
- "Plugin snapshot-artifact is present in the repository but " + "missing in the metadata.",
- result.getReason() );
+ assertEquals( "check reason", "Plugin snapshot-artifact is present in the repository but "
+ + "missing in the metadata.", result.getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( null, "default3" ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "", "default4" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3",
- result.getReason() );
+ assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3", result
+ .getReason() );
assertTrue( "check there is a 2nd failure", failures.hasNext() );
result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4",
- result.getReason() );
+ assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertTrue( "check there is a 2nd failure", failures.hasNext() );
result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.",
- result.getReason() );
+ assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
public void testValidSnapshotMetadata()
{
- Artifact artifact =
- artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+ "1.0-alpha-1-SNAPSHOT", "type" );
Snapshot snapshot = new Snapshot();
snapshot.setBuildNumber( 1 );
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
public void testInvalidSnapshotMetadata()
{
- Artifact artifact =
- artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+ "1.0-alpha-1-SNAPSHOT", "type" );
Snapshot snapshot = new Snapshot();
snapshot.setBuildNumber( 2 );
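+ // Build number 2 resolves to a timestamped snapshot that is not present
+ // in the test repository, so the processor is expected to record a failure.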
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
Result result = (Result) failures.next();
assertMetadata( metadata, results );
// TODO: should be more robust
- assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.",
- result.getReason() );
+ assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
private static void assertMetadata( RepositoryMetadata metadata, MetadataResults results )
{
- assertEquals( "check metadata", metadata.getGroupId(), results.getGroupId() );
- assertEquals( "check metadata", metadata.getArtifactId(), results.getArtifactId() );
- assertEquals( "check metadata", metadata.getBaseVersion(), results.getVersion() );
+ /* The funky StringUtils.defaultString() is used because of database constraints.
+ * The MetadataResults object has a composite primary key consisting of groupId,
+ * artifactId, and version, which means that none of those fields may be null.
+ * However, that doesn't rule out an empty string standing in for a null.
+ */
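+ /* (For reference: StringUtils.defaultString( null ) returns "", so a null
+ * field on the metadata side compares equal to the empty string the
+ * database stores in its place.)
+ */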
+ assertEquals( "check metadata", StringUtils.defaultString( metadata.getGroupId() ), results.getGroupId() );
+ assertEquals( "check metadata", StringUtils.defaultString( metadata.getArtifactId() ), results.getArtifactId() );
+ assertEquals( "check metadata", StringUtils.defaultString( metadata.getBaseVersion() ), results.getVersion() );
}
private Plugin createMetadataPlugin( String artifactId, String prefix )
*/
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
private static final String VALID_VERSION = "1.0-alpha-1";
- private ReportingDatabase reportingDatabase;
+ private ArtifactResultsDatabase database;
private Model model;
{
super.setUp();
model = new Model();
- processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
-
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
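+ // The results database is now a Plexus component looked up by role, rather
+ // than a ReportingDatabase constructed around a ReportGroup by hand.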
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+ processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
}
public void testArtifactFoundButNoDirectDependencies()
{
Artifact artifact = createValidArtifact();
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
private Artifact createValidArtifact()
{
- Artifact projectArtifact =
- artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
+ Artifact projectArtifact = artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID,
+ VALID_VERSION );
projectArtifact.setRepository( repository );
return projectArtifact;
}
{
Artifact artifact = artifactFactory.createProjectArtifact( INVALID, INVALID, INVALID );
artifact.setRepository( repository );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createValidDependency();
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
private Dependency createValidDependency()
Dependency dependency = createValidDependency();
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
public void testValidArtifactWithValidMultipleDependencies()
model.addDependency( dependency );
Artifact artifact = createValidArtifact();
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
public void testValidArtifactWithAnInvalidDependency()
model.addDependency( createDependency( INVALID, INVALID, INVALID ) );
Artifact artifact = createValidArtifact();
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Result result = (Result) failures.next();
- assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ),
- result.getReason() );
+ assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ), result.getReason() );
}
public void testValidArtifactWithInvalidDependencyGroupId()
Dependency dependency = createDependency( INVALID, VALID_ARTIFACT_ID, VALID_VERSION );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createDependency( VALID_GROUP_ID, INVALID, VALID_VERSION );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, INVALID );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[" );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[1.0,)" );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
public void testValidArtifactWithMissingDependencyVersion()
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, null );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.model.Model;
File indexDirectory;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
protected void setUp()
throws Exception
FileUtils.deleteDirectory( indexDirectory );
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+
artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "jar" );
- System.out.println( "artifact = " + artifact );
+
model = new Model();
RepositoryArtifactIndexFactory factory =
index.indexRecords( Collections.singletonList( recordFactory.createRecord( artifact ) ) );
processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "duplicate" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportDatabase = new ReportingDatabase( reportGroup );
}
public void testNullArtifactFile()
{
artifact.setFile( null );
- processor.processArtifact( artifact, model, reportDatabase );
+ processor.processArtifact( artifact, model );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check warnings", 1, reportDatabase.getNumWarnings() );
- assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check warnings", 1, database.getNumWarnings() );
+ assertEquals( "Check no failures", 0, database.getNumFailures() );
}
public void testSuccessOnAlreadyIndexedArtifact()
throws Exception
{
- processor.processArtifact( artifact, model, reportDatabase );
+ processor.processArtifact( artifact, model );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check warnings", 0, database.getNumWarnings() );
+ assertEquals( "Check no failures", 0, database.getNumFailures() );
}
public void testSuccessOnDifferentGroupId()
throws Exception
{
artifact.setGroupId( "different.groupId" );
- processor.processArtifact( artifact, model, reportDatabase );
+ processor.processArtifact( artifact, model );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check warnings", 0, database.getNumWarnings() );
+ assertEquals( "Check no failures", 0, database.getNumFailures() );
}
public void testSuccessOnNewArtifact()
{
Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
- processor.processArtifact( newArtifact, model, reportDatabase );
+ processor.processArtifact( newArtifact, model );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check warnings", 0, database.getNumWarnings() );
+ assertEquals( "Check no failures", 0, database.getNumFailures() );
}
public void testFailure()
artifact.getVersion(), artifact.getType() );
duplicate.setFile( artifact.getFile() );
- processor.processArtifact( duplicate, model, reportDatabase );
+ processor.processArtifact( duplicate, model );
- assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check no failures", 1, reportDatabase.getNumFailures() );
+ assertEquals( "Check warnings", 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check no failures", 1, database.getNumFailures() );
}
private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
*/
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
/**
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
public void setUp()
throws Exception
{
super.setUp();
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "invalid-pom" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportDatabase = new ReportingDatabase( reportGroup );
}
/**
{
Artifact artifact = createArtifact( "org.apache.maven", "artifactId", "1.0-alpha-3", "pom" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, database.getNumFailures() );
}
{
Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
{
Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "jar" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
}
*/
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
public void setUp()
throws Exception
{
super.setUp();
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "artifact-location" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportDatabase = new ReportingDatabase( reportGroup );
}
/**
{
Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( pomArtifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( pomArtifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+ artifactReportProcessor.processArtifact( artifact, model );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ assertEquals( 1, database.getNumFailures() );
}
/**
Artifact pomArtifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
}
private Model readPom( String path )
{
Artifact artifact = createArtifact( "org.apache.maven", "maven-monitor", "2.1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, database.getNumFailures() );
}
/**
{
Artifact artifact = createArtifact( "org.apache.maven", "maven-project", "2.1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, database.getNumFailures() );
}
}
*/
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
public void setUp()
throws Exception
{
super.setUp();
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-artifact" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
- reportDatabase = new ReportingDatabase( reportGroup );
}
public void testOldArtifact()
{
Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
- ArtifactResults results = (ArtifactResults) reportDatabase.getArtifactIterator().next();
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check notices", 1, database.getNumNotices() );
+ ArtifactResults results = (ArtifactResults) database.getIterator().next();
assertEquals( artifact.getArtifactId(), results.getArtifactId() );
assertEquals( artifact.getGroupId(), results.getGroupId() );
assertEquals( artifact.getVersion(), results.getVersion() );
Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
public void testMissingArtifact()
try
{
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+ artifactReportProcessor.processArtifact( artifact, null );
fail( "Should not have passed" );
}
catch ( IllegalStateException e )
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
private File tempRepository;
throws Exception
{
super.setUp();
- artifactReportProcessor =
- (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-snapshot-artifact" );
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+ artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE,
+ "old-snapshot-artifact" );
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
- reportDatabase = new ReportingDatabase( reportGroup );
tempRepository = getTestFile( "target/test-repository" );
FileUtils.deleteDirectory( tempRepository );
}
{
Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-20050611.202024-1", "pom" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
- Iterator artifactIterator = reportDatabase.getArtifactIterator();
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check notices", 1, database.getNumNotices() );
+ Iterator artifactIterator = database.getIterator();
assertArtifactResults( artifactIterator, artifact );
}
{
Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "pom" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
public void testNonSnapshotArtifact()
{
Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
public void testNewSnapshotArtifact()
String date = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).format( new Date() );
FileUtils.writeStringToFile( new File( dir, "artifactId-1.0-alpha-1-" + date + "-1.jar" ), "foo", null );
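+ // A snapshot stamped with the current date should not be flagged as old,
+ // so no notices are expected for it below.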
- Artifact artifact =
- createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date + "-1" );
+ Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date
+ + "-1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
public void testTooManySnapshotArtifact()
for ( int i = 1; i <= 5; i++ )
{
- Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId",
- "1.0-alpha-1-" + date + "-" + i );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+ Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId", "1.0-alpha-1-"
+ + date + "-" + i );
+ artifactReportProcessor.processArtifact( artifact, null );
}
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check notices", 3, reportDatabase.getNumNotices() );
- Iterator artifactIterator = reportDatabase.getArtifactIterator();
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check notices", 3, database.getNumNotices() );
+ Iterator artifactIterator = database.getIterator();
for ( int i = 1; i <= 3; i++ )
{
String version = "1.0-alpha-1-" + date + "-" + i;
try
{
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+ artifactReportProcessor.processArtifact( artifact, null );
fail( "Should not have passed" );
}
catch ( IllegalStateException e )
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
-import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
import org.codehaus.plexus.digest.DigesterException;
-import java.io.File;
import java.io.IOException;
-import java.util.Iterator;
/**
* This class tests the ChecksumArtifactReportProcessor.
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportingDatabase;
-
- private MetadataReportProcessor metadataReportProcessor;
+ private ArtifactResultsDatabase database;
public void setUp()
throws Exception
{
super.setUp();
artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
- metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
}
/**
Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
- artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
}
/**
String s1 = "1.0";
Artifact artifact = createArtifact( "checksumTest", s, s1 );
- artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
- }
-
- /**
- * Test the valid checksum of a metadata file.
- * The reportingDatabase should report 2 success validation.
- */
- public void testChecksumMetadataReporterSuccess()
- throws DigesterException, IOException
- {
- createMetadataFile( "VALID" );
- createMetadataFile( "INVALID" );
-
- Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
- //Version level metadata
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
- //Artifact level metadata
- metadata = new ArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
- //Group level metadata
- metadata = new GroupRepositoryMetadata( "checksumTest" );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
- }
-
- /**
- * Test the corrupted checksum of a metadata file.
- * The reportingDatabase must report 2 failures.
- */
- public void testChecksumMetadataReporterFailure()
- {
- Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
- Iterator failures = reportingDatabase.getMetadataIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- MetadataResults results = (MetadataResults) failures.next();
- failures = results.getFailures().iterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- }
-
- /**
- * Test the conditional when the checksum files of the artifact & metadata do not exist.
- */
- public void testChecksumFilesDoNotExist()
- throws DigesterException, IOException
- {
- createChecksumFile( "VALID" );
- createMetadataFile( "VALID" );
- deleteChecksumFiles( "jar" );
-
- Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
- artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
- Iterator failures = reportingDatabase.getMetadataIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- MetadataResults results = (MetadataResults) failures.next();
- failures = results.getFailures().iterator();
- assertTrue( "check there is a failure", failures.hasNext() );
-
- deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
}
}
--- /dev/null
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.codehaus.plexus.digest.DigesterException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * ChecksumMetadataReporterTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ChecksumMetadataReporterTest
+ extends AbstractChecksumArtifactReporterTestCase
+{
+ private ArtifactReportProcessor artifactReportProcessor;
+
+ private MetadataReportProcessor metadataReportProcessor;
+
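+ /* Metadata-level failures are recorded in the MetadataResultsDatabase,
+ * while artifact checksum failures land in the ArtifactResultsDatabase;
+ * both are looked up as Plexus components in setUp().
+ */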
+ private MetadataResultsDatabase database;
+
+ private ArtifactResultsDatabase artifactsDatabase;
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+ metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
+ artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
+ database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
+ artifactsDatabase = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+ }
+
+ /**
+ * Test the valid checksum of a metadata file.
+ * The results database should record 2 successful validations.
+ */
+ public void testChecksumMetadataReporterSuccess()
+ throws DigesterException, IOException
+ {
+ createMetadataFile( "VALID" );
+ createMetadataFile( "INVALID" );
+
+ Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+ //Version level metadata
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ metadataReportProcessor.processMetadata( metadata, repository );
+
+ //Artifact level metadata
+ metadata = new ArtifactRepositoryMetadata( artifact );
+ metadataReportProcessor.processMetadata( metadata, repository );
+
+ //Group level metadata
+ metadata = new GroupRepositoryMetadata( "checksumTest" );
+ metadataReportProcessor.processMetadata( metadata, repository );
+ }
+
+ /**
+ * Test the corrupted checksum of a metadata file.
+ * The results database must report 2 failures.
+ */
+ public void testChecksumMetadataReporterFailure()
+ {
+ Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
+
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ metadataReportProcessor.processMetadata( metadata, repository );
+
+ Iterator failures = database.getIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ }
+
+ /**
+ * Test the case where the checksum files of the artifact &amp; metadata do not exist.
+ */
+ public void testChecksumFilesDoNotExist()
+ throws DigesterException, IOException
+ {
+ createChecksumFile( "VALID" );
+ createMetadataFile( "VALID" );
+ deleteChecksumFiles( "jar" );
+
+ Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, artifactsDatabase.getNumFailures() );
+
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ metadataReportProcessor.processMetadata( metadata, repository );
+
+ Iterator failures = database.getIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ MetadataResults results = (MetadataResults) failures.next();
+ failures = results.getFailures().iterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+
+ deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+ }
+
+}
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.MetadataResults;
import org.apache.maven.archiva.reporting.model.Result;
import java.util.Iterator;
/**
+ * DefaultArtifactReporterTest
*
+ * @version $Id$
*/
public class DefaultArtifactReporterTest
extends AbstractRepositoryReportsTestCase
{
- private ReportingDatabase reportingDatabase;
+ private ReportingDatabase database;
private RepositoryMetadata metadata;
private Artifact artifact;
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ database = (ReportingDatabase) lookup( ReportingDatabase.ROLE );
+
+ ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.addVersion( "1.0-alpha-2" );
+
+ metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+ }
+
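+ // The fixture metadata above lists versions 1.0-alpha-1 and 1.0-alpha-2 and
+ // is shared by the failure, warning, and notice tests that follow.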
public void testEmptyArtifactReporter()
{
- assertEquals( "No failures", 0, reportingDatabase.getNumFailures() );
- assertEquals( "No warnings", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
- assertFalse( "No artifact failures", reportingDatabase.getArtifactIterator().hasNext() );
- assertFalse( "No metadata failures", reportingDatabase.getMetadataIterator().hasNext() );
+ assertEquals( "No failures", 0, database.getNumFailures() );
+ assertEquals( "No warnings", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
+ assertFalse( "No artifact failures", database.getArtifactIterator().hasNext() );
+ assertFalse( "No metadata failures", database.getMetadataIterator().hasNext() );
}
public void testMetadataSingleFailure()
{
- reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
- assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
+ assertEquals( "failures count", 1, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getMetadataIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
public void testMetadataMultipleFailures()
{
- reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
- reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
- assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
+ database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
+ assertEquals( "failures count", 2, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getMetadataIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
public void testMetadataSingleWarning()
{
- reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 1, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getMetadataIterator();
+ Iterator warnings = database.getMetadataIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
MetadataResults results = (MetadataResults) warnings.next();
warnings = results.getWarnings().iterator();
public void testMetadataMultipleWarnings()
{
- reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
- reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
+ database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 2, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getMetadataIterator();
+ Iterator warnings = database.getMetadataIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
MetadataResults results = (MetadataResults) warnings.next();
warnings = results.getWarnings().iterator();
public void testMetadataSingleNotice()
{
- reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
- assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
+ assertEquals( "failure count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check notices", 1, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getMetadataIterator();
+ Iterator warnings = database.getMetadataIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
MetadataResults results = (MetadataResults) warnings.next();
warnings = results.getNotices().iterator();
public void testMetadataMultipleNotices()
{
- reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
- reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
+ database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 2, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getMetadataIterator();
+ Iterator warnings = database.getMetadataIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
MetadataResults results = (MetadataResults) warnings.next();
warnings = results.getNotices().iterator();
public void testArtifactSingleFailure()
{
- reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
- assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
+ assertEquals( "failures count", 1, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getArtifactIterator();
assertTrue( "check there is a failure", failures.hasNext() );
ArtifactResults results = (ArtifactResults) failures.next();
failures = results.getFailures().iterator();
public void testArtifactMultipleFailures()
{
- reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
- reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
- assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
+ database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
+ assertEquals( "failures count", 2, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getArtifactIterator();
assertTrue( "check there is a failure", failures.hasNext() );
ArtifactResults results = (ArtifactResults) failures.next();
failures = results.getFailures().iterator();
public void testArtifactSingleWarning()
{
- reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 1, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getArtifactIterator();
+ Iterator warnings = database.getArtifactIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
ArtifactResults results = (ArtifactResults) warnings.next();
warnings = results.getWarnings().iterator();
public void testArtifactMultipleWarnings()
{
- reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
- reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
+ database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 2, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getArtifactIterator();
+ Iterator warnings = database.getArtifactIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
ArtifactResults results = (ArtifactResults) warnings.next();
warnings = results.getWarnings().iterator();
public void testArtifactSingleNotice()
{
- reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
- assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
+ assertEquals( "failure count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check notices", 1, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getArtifactIterator();
+ Iterator warnings = database.getArtifactIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
ArtifactResults results = (ArtifactResults) warnings.next();
warnings = results.getNotices().iterator();
public void testArtifactMultipleNotices()
{
- reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
- reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
+ database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 2, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getArtifactIterator();
+ Iterator warnings = database.getArtifactIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
ArtifactResults results = (ArtifactResults) warnings.next();
warnings = results.getNotices().iterator();
assertFalse( "no more warnings", warnings.hasNext() );
}
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.addVersion( "1.0-alpha-2" );
-
- metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
- }
}
--- /dev/null
+<component-set>
+ <components>
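+ <!-- JdoFactory for the "archiva" store used by the JDO-backed test components. -->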
+ <component>
+ <role>org.codehaus.plexus.jdo.JdoFactory</role>
+ <role-hint>archiva</role-hint>
+ <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+ </component>
+ </components>
+</component-set>
--- /dev/null
+# Set root logger level to WARN and its only appender to A1.
+log4j.rootLogger=WARN, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+
<requirement>
<role>org.codehaus.plexus.digest.Digester</role>
<role-hint>md5</role-hint>
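+ <!-- field-name tells Plexus which field of the component receives the requirement. -->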
+ <field-name>digester</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory</role>
+ <field-name>indexFactory</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+ <field-name>database</field-name>
</requirement>
</requirements>
<configuration>
<role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
<role-hint>old-artifact</role-hint>
<implementation>org.apache.maven.archiva.reporting.processor.OldArtifactReportProcessor</implementation>
+ <requirements>
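+ <!-- The processor records its results in the ArtifactResultsDatabase injected here. -->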
+ <requirement>
+ <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+ <field-name>database</field-name>
+ </requirement>
+ </requirements>
<configuration>
<maxAge>10</maxAge>
</configuration>
<role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
<role-hint>old-snapshot-artifact</role-hint>
<implementation>org.apache.maven.archiva.reporting.processor.OldSnapshotArtifactReportProcessor</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+ <field-name>database</field-name>
+ </requirement>
+ </requirements>
<configuration>
<maxAge>3600</maxAge>
<maxSnapshots>2</maxSnapshots>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
+ <version>10.1.3.1</version>
<scope>provided</scope>
</dependency>
<dependency>
/**
* AuditLog - Audit Log.
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.web.repository.AuditLog"
/**
* ProxiedDavServer
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
* @plexus.component role="org.codehaus.plexus.webdav.DavServerComponent"
* role-hint="proxied"
/**
* RepositoryServlet
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class RepositoryServlet
/**
* DownloadArtifact
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role="com.opensymphony.webwork.components.Component" role-hint="download-artifact"
/**
* DownloadArtifactTag
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class DownloadArtifactTag
/**
* ExpressionTool
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class ExpressionTool
/**
* GroupIdLink
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class GroupIdLink
/**
* GroupIdLink
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class GroupIdLinkTag
/**
* PlexusTagUtil
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class PlexusTagUtil
</component>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
- <role-hint>indexer</role-hint>
+ <role-hint>data-refresh</role-hint>
</component>
</load-on-start>
</plexus>
</snapshots>
</repository>
</repositories>
+ <pluginRepositories>
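+ <!-- Snapshot-only plugin repository; releases are disabled below. -->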
+ <pluginRepository>
+ <id>codehaus.org</id>
+ <url>http://snapshots.repository.codehaus.org</url>
+ <releases>
+ <enabled>false</enabled>
+ </releases>
+ <snapshots>
+ <enabled>true</enabled>
+ </snapshots>
+ </pluginRepository>
+ </pluginRepositories>
<properties>
<maven.version>2.0.4</maven.version>
<wagon.version>1.0-beta-2</wagon.version>