<dependencies>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-core</artifactId>
+ <artifactId>archiva-converter</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.conversion.LegacyRepositoryConverter;
import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.tools.cli.AbstractCli;
System.out.println( "Converting " + oldRepositoryPath + " to " + newRepositoryPath );
- List blacklistedPatterns = null;
+ List fileExclusionPatterns = null;
String s = p.getProperty( BLACKLISTED_PATTERNS );
if ( s != null )
{
- blacklistedPatterns = Arrays.asList( StringUtils.split( s, "," ) );
+ fileExclusionPatterns = Arrays.asList( StringUtils.split( s, "," ) );
}
try
{
legacyRepositoryConverter.convertLegacyRepository( oldRepositoryPath, newRepositoryPath,
- blacklistedPatterns, true );
+ fileExclusionPatterns,
+ true );
}
catch ( RepositoryConversionException e )
{
showFatalError( "Error converting repository.", e, true );
}
- catch ( DiscovererException e )
- {
- showFatalError( "Error discovery artifacts to convert.", e, true );
- }
}
}
}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>archiva</artifactId>
+ <groupId>org.apache.maven.archiva</groupId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>archiva-common</artifactId>
+ <name>Archiva Common</name>
+ <dependencies>
+ <!-- TO OTHER DEVELOPERS:
+ This module should depend on NO OTHER ARCHIVA MODULES.
+ If you feel tempted to add one, discuss it first in the
+ archiva-dev@maven.apache.org mailing-list.
+ joakime@apache.org
+ -->
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-component-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-artifact-manager</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-project</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-container-default</artifactId>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <!--
+ <plugin>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>test-jar</id>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ -->
+ <plugin>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-maven-plugin</artifactId>
+ <!--
+ <executions>
+ <execution>
+ <id>merge</id>
+ <goals>
+ <goal>merge-descriptors</goal>
+ </goals>
+ <configuration>
+ <descriptors>
+ <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+ <descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </execution>
+ </executions>
+ -->
+ </plugin>
+ </plugins>
+ </build>
+</project>
--- /dev/null
+package org.apache.maven.archiva.common;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ArchivaException - the base checked exception for this module; more
+ * specific failures (e.g. BuilderException) extend it.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArchivaException
+ extends Exception
+{
+ /**
+ * @param message description of the failure.
+ * @param cause underlying cause, preserved in the stack trace.
+ */
+ public ArchivaException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+
+ /**
+ * @param message description of the failure.
+ */
+ public ArchivaException( String message )
+ {
+ super( message );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+/**
+ * AbstractLayoutArtifactBuilder - shared base for the layout-specific
+ * builders; holds the ArtifactFactory the subclasses use to materialize
+ * Artifact instances from parsed path information.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractLayoutArtifactBuilder
+ implements LayoutArtifactBuilder
+{
+ /**
+ * Factory used by subclasses to create Artifact objects; injected by
+ * plexus, or supplied via the manual constructor below.
+ *
+ * @plexus.requirement
+ */
+ protected ArtifactFactory artifactFactory;
+
+ /**
+ * Constructor used by plexus (artifactFactory is injected via the
+ * requirement above).
+ */
+ public AbstractLayoutArtifactBuilder()
+ {
+
+ }
+
+ /**
+ * Constructor for manual (non-plexus) construction.
+ *
+ * @param artifactFactory the artifact factory to use.
+ */
+ public AbstractLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+ {
+ this.artifactFactory = artifactFactory;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.ArchivaException;
+
+/**
+ * BuilderException - used to indicate a problem during the building of an object from file.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class BuilderException
+ extends ArchivaException
+{
+
+ /**
+ * @param message description of the build failure.
+ * @param cause underlying cause, preserved in the stack trace.
+ */
+ public BuilderException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+
+ /**
+ * @param message description of the build failure.
+ */
+ public BuilderException( String message )
+ {
+ super( message );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * DefaultLayoutArtifactBuilder - artifact builder for default layout repositories.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder"
+ * role-hint="default"
+ */
+public class DefaultLayoutArtifactBuilder
+ extends AbstractLayoutArtifactBuilder
+ implements LayoutArtifactBuilder
+{
+ public DefaultLayoutArtifactBuilder()
+ {
+ super();
+ }
+
+ public DefaultLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+ {
+ super( artifactFactory );
+ }
+
+ /**
+ * Build an Artifact from a default ("maven 2") layout repository path of
+ * the form group/id/parts/artifactId/version/filename, where the filename
+ * must start with "artifactId-".
+ *
+ * @param pathToArtifact relative repository path, '/' or '\' separated.
+ * @return the corresponding artifact; never null.
+ * @throws BuilderException if the path cannot be interpreted as an artifact.
+ * @throws IllegalStateException if no artifactFactory was provided.
+ */
+ public Artifact build( String pathToArtifact )
+ throws BuilderException
+ {
+ if( artifactFactory == null )
+ {
+ throw new IllegalStateException( "Unable to build artifact with a null artifactFactory." );
+ }
+
+ List pathParts = new ArrayList();
+ StringTokenizer st = new StringTokenizer( pathToArtifact, "/\\" );
+ while ( st.hasMoreTokens() )
+ {
+ pathParts.add( st.nextToken() );
+ }
+
+ // Reverse so filename / version / artifactId can be peeled off the
+ // front regardless of how many segments the groupId occupies.
+ Collections.reverse( pathParts );
+
+ Artifact artifact;
+ if ( pathParts.size() >= 4 )
+ {
+ // maven 2.x path
+
+ // the actual artifact filename.
+ String filename = (String) pathParts.remove( 0 );
+
+ // the next one is the version.
+ String version = (String) pathParts.remove( 0 );
+
+ // the next one is the artifactId.
+ String artifactId = (String) pathParts.remove( 0 );
+
+ // the remaining are the groupId.
+ Collections.reverse( pathParts );
+ String groupId = StringUtils.join( pathParts.iterator(), "." );
+
+ String remainingFilename = filename;
+ if ( remainingFilename.startsWith( artifactId + "-" ) )
+ {
+ remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
+
+ String classifier = null;
+
+ // TODO: use artifact handler, share with legacy discoverer
+ String type;
+ if ( remainingFilename.endsWith( ".tar.gz" ) )
+ {
+ type = "distribution-tgz";
+ remainingFilename = remainingFilename
+ .substring( 0, remainingFilename.length() - ".tar.gz".length() );
+ }
+ else if ( remainingFilename.endsWith( ".zip" ) )
+ {
+ type = "distribution-zip";
+ remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
+ }
+ else if ( remainingFilename.endsWith( "-test-sources.jar" ) )
+ {
+ type = "java-source";
+ classifier = "test-sources";
+ remainingFilename = remainingFilename.substring( 0, remainingFilename.length()
+ - "-test-sources.jar".length() );
+ }
+ else if ( remainingFilename.endsWith( "-sources.jar" ) )
+ {
+ type = "java-source";
+ classifier = "sources";
+ remainingFilename = remainingFilename.substring( 0, remainingFilename.length()
+ - "-sources.jar".length() );
+ }
+ else
+ {
+ // fall back: extension after the last '.' becomes the type.
+ int index = remainingFilename.lastIndexOf( "." );
+ if ( index >= 0 )
+ {
+ type = remainingFilename.substring( index + 1 );
+ remainingFilename = remainingFilename.substring( 0, index );
+ }
+ else
+ {
+ throw new BuilderException( "Path filename does not have an extension." );
+ }
+ }
+
+ Artifact result;
+ if ( classifier == null )
+ {
+ result = artifactFactory
+ .createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
+ }
+ else
+ {
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifier );
+ }
+
+ if ( result.isSnapshot() )
+ {
+ // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
+ // version.length() + 8 == prefix + "yyyyMMdd.hhmmss" + 1 (since
+ // "SNAPSHOT" is 8 chars), so the search begins inside the build
+ // number; the next '-' found (if any) separates a classifier.
+ int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
+ if ( classifierIndex >= 0 )
+ {
+ classifier = remainingFilename.substring( classifierIndex + 1 );
+ remainingFilename = remainingFilename.substring( 0, classifierIndex );
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
+ type, classifier );
+ }
+ else
+ {
+ result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
+ Artifact.SCOPE_RUNTIME, type );
+ }
+
+ // poor encapsulation requires we do this to populate base version
+ if ( !result.isSnapshot() )
+ {
+ throw new BuilderException( "Failed to create a snapshot artifact: " + result );
+ }
+ else if ( !result.getBaseVersion().equals( version ) )
+ {
+ throw new BuilderException(
+ "Built snapshot artifact base version does not match path version: "
+ + result.getBaseVersion() + "; should have been version: "
+ + version );
+ }
+ else
+ {
+ artifact = result;
+ }
+ }
+ else if ( !remainingFilename.startsWith( version ) )
+ {
+ throw new BuilderException( "Built artifact version does not match path version" );
+ }
+ else if ( !remainingFilename.equals( version ) )
+ {
+ // non-snapshot with extra text after the version: must be a
+ // '-' separated classifier, otherwise the path is malformed.
+ if ( remainingFilename.charAt( version.length() ) == '-' )
+ {
+ classifier = remainingFilename.substring( version.length() + 1 );
+ artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifier );
+ }
+ else
+ {
+ throw new BuilderException( "Path version does not corresspond to an artifact version" );
+ }
+ }
+ else
+ {
+ artifact = result;
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path filename does not correspond to an artifact." );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path is too short to build an artifact from." );
+ }
+
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+/**
+ * LayoutArtifactBuilder - builds an Artifact from a repository path, with one
+ * implementation per repository layout (role-hints "default" and "legacy").
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @todo this concept should really exist inside of the {@link ArtifactRepositoryLayout} object in maven itself.
+ */
+public interface LayoutArtifactBuilder
+{
+ /**
+ * Build an artifact from a relative repository path.
+ *
+ * @param pathToArtifact the relative path within the repository.
+ * @return the corresponding artifact; never null.
+ * @throws BuilderException if the path cannot be interpreted as an artifact.
+ */
+ public Artifact build( String pathToArtifact ) throws BuilderException;
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.StringTokenizer;
+
+/**
+ * LegacyLayoutArtifactBuilder - builds an Artifact from a legacy ("maven 1")
+ * layout path of the form groupId/types/filename (e.g. "org.foo/jars/foo-1.0.jar"),
+ * where the middle segment is the pluralized artifact type.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder"
+ * role-hint="legacy"
+ */
+public class LegacyLayoutArtifactBuilder
+ extends AbstractLayoutArtifactBuilder
+ implements LayoutArtifactBuilder
+{
+ public LegacyLayoutArtifactBuilder()
+ {
+ super();
+ }
+
+ public LegacyLayoutArtifactBuilder( ArtifactFactory artifactFactory )
+ {
+ super( artifactFactory );
+ }
+
+ /**
+ * Build an Artifact from a legacy layout repository path.
+ *
+ * The filename is tokenized on '-' and then partitioned (from the back)
+ * into classifier, version and artifactId using a version-part pattern.
+ *
+ * @param pathToArtifact relative repository path, '/' or '\' separated;
+ * must contain exactly three segments.
+ * @return the corresponding artifact; never null.
+ * @throws BuilderException if the path cannot be interpreted as an artifact.
+ * @throws IllegalStateException if no artifactFactory was provided.
+ */
+ public Artifact build( String pathToArtifact )
+ throws BuilderException
+ {
+ if( artifactFactory == null )
+ {
+ throw new IllegalStateException( "Unable to build legacy artifact with a null artifactFactory." );
+ }
+
+ StringTokenizer tokens = new StringTokenizer( pathToArtifact, "/\\" );
+
+ Artifact result;
+
+ int numberOfTokens = tokens.countTokens();
+
+ if ( numberOfTokens == 3 )
+ {
+ String groupId = tokens.nextToken();
+
+ String type = tokens.nextToken();
+
+ if ( type.endsWith( "s" ) )
+ {
+ // strip the plural 's' from the directory name to get the type.
+ type = type.substring( 0, type.length() - 1 );
+
+ // contains artifactId, version, classifier, and extension.
+ String avceGlob = tokens.nextToken();
+
+ //noinspection CollectionDeclaredAsConcreteClass
+ LinkedList avceTokenList = new LinkedList();
+
+ StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
+ while ( avceTokenizer.hasMoreTokens() )
+ {
+ avceTokenList.addLast( avceTokenizer.nextToken() );
+ }
+
+ String lastAvceToken = (String) avceTokenList.removeLast();
+
+ // TODO: share with other discoverer, use artifact handlers instead
+ if ( lastAvceToken.endsWith( ".tar.gz" ) )
+ {
+ type = "distribution-tgz";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else if ( lastAvceToken.endsWith( "sources.jar" ) )
+ {
+ type = "java-source";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else if ( lastAvceToken.endsWith( "javadoc.jar" ) )
+ {
+ // NOTE(review): "javadoc.jar" looks inconsistent with the
+ // "java-source" handling above (a plain "javadoc" type would
+ // be expected) - confirm against the artifact handlers before
+ // changing, since this value is used to create the artifact.
+ type = "javadoc.jar";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else if ( lastAvceToken.endsWith( ".zip" ) )
+ {
+ type = "distribution-zip";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else
+ {
+ int extPos = lastAvceToken.lastIndexOf( '.' );
+
+ if ( extPos > 0 )
+ {
+ String ext = lastAvceToken.substring( extPos + 1 );
+ if ( type.equals( ext ) || "plugin".equals( type ) )
+ {
+ lastAvceToken = lastAvceToken.substring( 0, extPos );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else
+ {
+ throw new BuilderException( "Path type does not match the extension" );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path filename does not have an extension" );
+ }
+ }
+
+ // let's discover the version, and whatever's leftover will be either
+ // a classifier, or part of the artifactId, depending on position.
+ // Since version is at the end, we have to move in from the back.
+ Collections.reverse( avceTokenList );
+
+ // TODO: this is obscene - surely a better way?
+ String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|"
+ + "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|"
+ + "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|"
+ + "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|"
+ + "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|"
+ + "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|"
+ + "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
+
+ StringBuffer classifierBuffer = new StringBuffer();
+ StringBuffer versionBuffer = new StringBuffer();
+
+ boolean firstVersionTokenEncountered = false;
+ boolean firstToken = true;
+
+ // counts classifier + version tokens consumed from the (reversed)
+ // list, so the remainder can be treated as the artifactId below.
+ int tokensIterated = 0;
+ for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+ {
+ String token = (String) it.next();
+
+ boolean tokenIsVersionPart = token.matches( validVersionParts );
+
+ StringBuffer bufferToUpdate;
+
+ // NOTE: logic in code is reversed, since we're peeling off the back
+ // Any token after the last versionPart will be in the classifier.
+ // Any token UP TO first non-versionPart is part of the version.
+ if ( !tokenIsVersionPart )
+ {
+ if ( firstVersionTokenEncountered )
+ {
+ //noinspection BreakStatement
+ break;
+ }
+ else
+ {
+ bufferToUpdate = classifierBuffer;
+ }
+ }
+ else
+ {
+ firstVersionTokenEncountered = true;
+
+ bufferToUpdate = versionBuffer;
+ }
+
+ if ( firstToken )
+ {
+ firstToken = false;
+ }
+ else
+ {
+ bufferToUpdate.insert( 0, '-' );
+ }
+
+ bufferToUpdate.insert( 0, token );
+
+ tokensIterated++;
+ }
+
+ // Now, restore the proper ordering so we can build the artifactId.
+ Collections.reverse( avceTokenList );
+
+ // if we didn't find a version, then punt. Use the last token
+ // as the version, and set the classifier empty.
+ if ( versionBuffer.length() < 1 )
+ {
+ if ( avceTokenList.size() > 1 )
+ {
+ int lastIdx = avceTokenList.size() - 1;
+
+ versionBuffer.append( avceTokenList.get( lastIdx ) );
+ avceTokenList.remove( lastIdx );
+ }
+
+ classifierBuffer.setLength( 0 );
+ }
+ else
+ {
+ // if everything is kosher, then pop off all the classifier and
+ // version tokens, leaving the naked artifact id in the list.
+ avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
+ }
+
+ StringBuffer artifactIdBuffer = new StringBuffer();
+
+ firstToken = true;
+ for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+ {
+ String token = (String) it.next();
+
+ if ( firstToken )
+ {
+ firstToken = false;
+ }
+ else
+ {
+ artifactIdBuffer.append( '-' );
+ }
+
+ artifactIdBuffer.append( token );
+ }
+
+ String artifactId = artifactIdBuffer.toString();
+
+ if ( artifactId.length() > 0 )
+ {
+ // trim a trailing '-' left over from the reverse assembly above.
+ int lastVersionCharIdx = versionBuffer.length() - 1;
+ if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
+ {
+ versionBuffer.setLength( lastVersionCharIdx );
+ }
+
+ String version = versionBuffer.toString();
+
+ if ( version.length() > 0 )
+ {
+ if ( classifierBuffer.length() > 0 )
+ {
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifierBuffer.toString() );
+ }
+ else
+ {
+ result = artifactFactory.createArtifact( groupId, artifactId, version,
+ Artifact.SCOPE_RUNTIME, type );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path filename version is empty" );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path filename artifactId is empty" );
+ }
+ }
+ else
+ {
+ // NOTE(review): message contains the typo "corresspond"; it is a
+ // runtime string, left untouched in this documentation-only pass.
+ throw new BuilderException( "Path artifact type does not corresspond to an artifact type" );
+ }
+ }
+ else
+ {
+ throw new BuilderException( "Path does not match a legacy repository path for an artifact" );
+ }
+
+ return result;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.managed;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * ManagedArtifact - an artifact located in a managed repository, tracking the
+ * repository it came from, its path within it, and any attached companions
+ * (see the subclasses ManagedJavaArtifact / ManagedEjbArtifact).
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ManagedArtifact
+{
+ /** Identifier of the repository this artifact belongs to. */
+ private String repositoryId;
+
+ /** The underlying maven artifact. */
+ private Artifact artifact;
+
+ /** Path to the artifact within the repository. */
+ private String path;
+
+ /** Attached companion paths, keyed by kind (e.g. "javadoc", "sources"). */
+ protected Map attached;
+
+ /**
+ * @param repoId identifier of the owning repository.
+ * @param artifact the underlying maven artifact.
+ * @param path path of the artifact within the repository.
+ */
+ public ManagedArtifact( String repoId, Artifact artifact, String path )
+ {
+ super();
+ this.repositoryId = repoId;
+ this.artifact = artifact;
+ this.path = path;
+ this.attached = new HashMap();
+ }
+
+ public Artifact getArtifact()
+ {
+ return artifact;
+ }
+
+ public String getPath()
+ {
+ return path;
+ }
+
+ public String getRepositoryId()
+ {
+ return repositoryId;
+ }
+
+ public Map getAttached()
+ {
+ return attached;
+ }
+
+ public void setAttached( Map attached )
+ {
+ this.attached = attached;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.managed;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * ManagedArtifactTypes - provides place to test an unknown artifact type.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ManagedArtifactTypes
+{
+ /** Catch-all category for types not recognised as java or ejb. */
+ public static final int GENERIC = 0;
+
+ /** Plain java packaging types (jar, war, sar, rar, ear). */
+ public static final int JAVA = 1;
+
+ /** EJB packaging types (ejb, ejb-client). */
+ public static final int EJB = 2;
+
+ private static List javaArtifacts;
+
+ private static List ejbArtifacts;
+
+ static
+ {
+ javaArtifacts = new ArrayList();
+ javaArtifacts.add( "jar" );
+ javaArtifacts.add( "war" );
+ javaArtifacts.add( "sar" );
+ javaArtifacts.add( "rar" );
+ javaArtifacts.add( "ear" );
+
+ ejbArtifacts = new ArrayList();
+ ejbArtifacts.add( "ejb" );
+ ejbArtifacts.add( "ejb-client" );
+ }
+
+ /**
+ * Categorize an artifact type string.
+ *
+ * @param type the artifact type (case insensitive); may be null or blank.
+ * @return one of {@link #EJB}, {@link #JAVA} or {@link #GENERIC}.
+ */
+ public static int whichType( String type )
+ {
+ if ( StringUtils.isBlank( type ) )
+ {
+ // TODO: is an empty type even possible?
+ return GENERIC;
+ }
+
+ // NOTE(review): lowercases with the default locale; a locale-neutral
+ // lowercase would be safer for type identifiers - confirm.
+ type = type.toLowerCase();
+
+ if ( ejbArtifacts.contains( type ) )
+ {
+ return EJB;
+ }
+
+ if ( javaArtifacts.contains( type ) )
+ {
+ return JAVA;
+ }
+
+ return GENERIC;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.managed;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * ManagedEjbArtifact - adds the ability to reference the ejb-client jar too.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ManagedEjbArtifact
+ extends ManagedJavaArtifact
+{
+ /** Key used in the attached map for the ejb-client jar path. */
+ public static final String CLIENT = "client";
+
+ public ManagedEjbArtifact( String repoId, Artifact artifact, String path )
+ {
+ super( repoId, artifact, path );
+ }
+
+ /**
+ * @return the ejb-client jar path, or null if none was attached.
+ */
+ public String getClientPath()
+ {
+ return (String) super.attached.get( CLIENT );
+ }
+
+ public void setClientPath( String clientPath )
+ {
+ super.attached.put( CLIENT, clientPath );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.managed;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * ManagedJavaArtifact - a ManagedArtifact with optional javadoc and source
+ * reference jars.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ManagedJavaArtifact
+ extends ManagedArtifact
+{
+ /** Key used in the attached map for the javadoc jar path. */
+ public static final String JAVADOC = "javadoc";
+
+ /** Key used in the attached map for the sources jar path. */
+ public static final String SOURCES = "sources";
+
+ public ManagedJavaArtifact( String repoId, Artifact artifact, String path )
+ {
+ super( repoId, artifact, path );
+ }
+
+ /**
+ * @return the javadoc jar path, or null if none was attached.
+ */
+ public String getJavadocPath()
+ {
+ return (String) super.attached.get( JAVADOC );
+ }
+
+ public void setJavadocPath( String javadocPath )
+ {
+ super.attached.put( JAVADOC, javadocPath );
+ }
+
+ /**
+ * @return the sources jar path, or null if none was attached.
+ */
+ public String getSourcesPath()
+ {
+ return (String) super.attached.get( SOURCES );
+ }
+
+ public void setSourcesPath( String sourcesPath )
+ {
+ super.attached.put( SOURCES, sourcesPath );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * AbstractDiscovererConsumer
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
public abstract class AbstractConsumer
    extends AbstractLogEnabled
    implements Consumer
{
    /**
     * Factory available to subclasses for creating Artifact objects.
     *
     * @plexus.requirement
     */
    protected ArtifactFactory artifactFactory;

    // The repository this consumer was most recently init()'d against.
    protected ArtifactRepository repository;

    protected AbstractConsumer()
    {
        /* do nothing */
    }

    /**
     * By default a consumer excludes nothing; subclasses override this to
     * filter files out of consideration.
     *
     * @return an empty (immutable) list of exclusion patterns.
     */
    public List getExcludePatterns()
    {
        return Collections.EMPTY_LIST;
    }

    /**
     * Remember the repository and report whether this consumer applies to it.
     *
     * @param repository the repository to initialize the consumer against.
     * @return the value of {@link #isEnabled()} after the repository is stored.
     */
    public boolean init( ArtifactRepository repository )
    {
        this.repository = repository;
        return isEnabled();
    }

    /**
     * Hook for subclasses to veto participation for the current repository
     * (for example, based on its layout).
     *
     * @return true by default.
     */
    protected boolean isEnabled()
    {
        return true;
    }
}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.List;
+
+/**
+ * DiscovererConsumer
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
public interface Consumer
{
    public static final String ROLE = Consumer.class.getName();

    /**
     * This is the human readable name for the consumer.
     *
     * @return the human readable consumer name.
     */
    public String getName();

    /**
     * This is used to initialize any internals in the consumer before it is used.
     *
     * This method is called by the internals of archiva and is not meant to be used by other developers.
     * This method is called once per repository.
     *
     * @param repository the repository to initialize the consumer against.
     * @return true if the repository is valid for this consumer. false will result in consumer being disabled
     *      for the provided repository.
     */
    public boolean init( ArtifactRepository repository );

    /**
     * Get the List of excluded file patterns for this consumer.
     *
     * @return the list of excluded file patterns for this consumer.
     */
    public List getExcludePatterns();

    /**
     * Get the List of included file patterns for this consumer.
     *
     * @return the list of included file patterns for this consumer.
     */
    public List getIncludePatterns();

    /**
     * Called by the archiva framework to indicate that there is a file suitable for consuming.
     * This method will only be called if the {@link #init(ArtifactRepository)} and {@link #getExcludePatterns()}
     * and {@link #getIncludePatterns()} all pass for this consumer.
     *
     * @param file the file to process.
     * @throws ConsumerException if there was a problem processing this file.
     */
    public void processFile( BaseFile file ) throws ConsumerException;

    /**
     * Called by the archiva framework to indicate that there has been a problem detected
     * on a specific file.
     *
     * NOTE: It is very possible for 1 file to have more than 1 problem associated with it.
     *
     * @param file the file that has the problem.
     * @param message the message describing the problem.
     */
    public void processFileProblem( BaseFile file, String message );
}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.ArchivaException;
+import org.apache.maven.archiva.common.utils.BaseFile;
+
+/**
+ * ConsumerException - details about the failure of a consumer.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ConsumerException
+ extends ArchivaException
+{
+ private BaseFile file;
+
+ public ConsumerException( BaseFile file, String message, Throwable cause )
+ {
+ super( message, cause );
+ this.file = file;
+ }
+
+ public ConsumerException( BaseFile file, String message )
+ {
+ super( message );
+ this.file = file;
+ }
+
+ public BaseFile getFile()
+ {
+ return file;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.codehaus.plexus.PlexusConstants;
+import org.codehaus.plexus.PlexusContainer;
+import org.codehaus.plexus.context.Context;
+import org.codehaus.plexus.context.ContextException;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable;
+
+/**
+ * DiscovererConsumerFactory - factory for consumers.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.ConsumerFactory"
+ */
+public class ConsumerFactory
+ extends AbstractLogEnabled
+ implements Contextualizable
+{
+ public static final String ROLE = ConsumerFactory.class.getName();
+
+ private PlexusContainer container;
+
+ public Consumer createConsumer( String name )
+ throws ConsumerException
+ {
+ getLogger().info( "Attempting to create consumer [" + name + "]" );
+
+ Consumer consumer;
+ try
+ {
+ consumer = (Consumer) container.lookup( Consumer.ROLE, container.getLookupRealm() );
+ }
+ catch ( Throwable t )
+ {
+ String emsg = "Unable to create consumer [" + name + "]: " + t.getMessage();
+ getLogger().warn( t.getMessage(), t );
+ throw new ConsumerException( null, emsg, t );
+ }
+
+ getLogger().info( "Created consumer [" + name + "|" + consumer.getName() + "]" );
+ return consumer;
+ }
+
+ public void contextualize( Context context )
+ throws ContextException
+ {
+ container = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.artifact.builder.BuilderException;
+import org.apache.maven.archiva.common.artifact.builder.DefaultLayoutArtifactBuilder;
+import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder;
+import org.apache.maven.archiva.common.artifact.builder.LegacyLayoutArtifactBuilder;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * DefaultArtifactConsumer
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
public abstract class GenericArtifactConsumer
    extends AbstractConsumer
    implements Consumer
{
    /**
     * Process an artifact that was successfully built from a repository file.
     *
     * @param artifact the artifact (repository and file are already set on it).
     * @param file the file the artifact was built from.
     */
    public abstract void processArtifact( Artifact artifact, BaseFile file );

    // Layout id ("default" / "legacy") -> LayoutArtifactBuilder; populated in init().
    private Map artifactBuilders = new HashMap();

    // File extensions that may represent artifacts in the repository.
    private static final List includePatterns;

    static
    {
        includePatterns = new ArrayList();
        includePatterns.add( "**/*.pom" );
        includePatterns.add( "**/*.jar" );
        includePatterns.add( "**/*.war" );
        includePatterns.add( "**/*.ear" );
        includePatterns.add( "**/*.sar" );
        includePatterns.add( "**/*.zip" );
        includePatterns.add( "**/*.gz" );
        includePatterns.add( "**/*.bz2" );
    }

    // Which builder to use; selected in init() from the repository's layout.
    private String layoutId = "default";

    /**
     * Prepare the consumer: register both layout builders and select the one
     * matching the repository's layout (legacy layouts switch layoutId to
     * "legacy"; everything else uses "default").
     *
     * @param repository the repository to initialize against.
     * @return the result of the superclass init (i.e. {@link #isEnabled()}).
     */
    public boolean init( ArtifactRepository repository )
    {
        this.artifactBuilders.clear();
        this.artifactBuilders.put( "default", new DefaultLayoutArtifactBuilder( artifactFactory ) );
        this.artifactBuilders.put( "legacy", new LegacyLayoutArtifactBuilder( artifactFactory ) );

        if ( repository.getLayout() instanceof LegacyRepositoryLayout )
        {
            this.layoutId = "legacy";
        }

        return super.init( repository );
    }

    public List getIncludePatterns()
    {
        return includePatterns;
    }

    // Widened from protected (in AbstractConsumer) to public.
    public boolean isEnabled()
    {
        ArtifactRepositoryLayout layout = repository.getLayout();
        return ( layout instanceof DefaultRepositoryLayout ) || ( layout instanceof LegacyRepositoryLayout );
    }

    /**
     * Build an artifact from the file's repository path and pass it on to
     * {@link #processArtifact(Artifact, BaseFile)}.
     *
     * Empty or unreadable files are reported via processFileProblem() but
     * processing still continues — the artifact is constructed from the file's
     * relative path, not its contents. NOTE(review): confirm that continuing
     * after a reported problem is intended rather than returning early.
     *
     * @param file the file to process.
     * @throws ConsumerException if the path could not be parsed into an artifact.
     */
    public void processFile( BaseFile file )
        throws ConsumerException
    {
        if ( file.length() <= 0 )
        {
            processFileProblem( file, "File is empty." );
        }

        if ( !file.canRead() )
        {
            processFileProblem( file, "Not allowed to read file due to permission settings on file." );
        }

        try
        {
            Artifact artifact = buildArtifact( file );

            processArtifact( artifact, file );
        }
        catch ( BuilderException e )
        {
            throw new ConsumerException( file, e.getMessage(), e );
        }
    }

    /**
     * Parse the file's repository-relative path into an Artifact using the
     * builder matching the repository layout.
     *
     * @param file the file whose path should be parsed.
     * @return the artifact, with repository and file set.
     * @throws BuilderException if the path does not map to an artifact.
     */
    private Artifact buildArtifact( BaseFile file )
        throws BuilderException
    {
        LayoutArtifactBuilder builder = (LayoutArtifactBuilder) artifactBuilders.get( layoutId );

        Artifact artifact = builder.build( file.getRelativePath() );
        artifact.setRepository( repository );
        artifact.setFile( file );

        return artifact;
    }
}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * GenericModelConsumer - consumer for pom files.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class GenericModelConsumer
+ extends AbstractConsumer
+ implements Consumer
+{
+ public abstract void processModel( Model model, BaseFile file );
+
+ private static final List includePatterns;
+
+ static
+ {
+ includePatterns = new ArrayList();
+ includePatterns.add( "**/*.pom" );
+ }
+
+ public List getIncludePatterns()
+ {
+ return includePatterns;
+ }
+
+ public boolean isEnabled()
+ {
+ return true;
+ }
+
+ public void processFile( BaseFile file )
+ throws ConsumerException
+ {
+ Model model = buildModel( file );
+ processModel( model, file );
+ }
+
+ private Model buildModel( BaseFile file )
+ throws ConsumerException
+ {
+ Model model;
+ Reader reader = null;
+ try
+ {
+ reader = new FileReader( file );
+ MavenXpp3Reader modelReader = new MavenXpp3Reader();
+
+ model = modelReader.read( reader );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new ConsumerException( file, "Error parsing metadata file: " + e.getMessage(), e );
+ }
+ catch ( IOException e )
+ {
+ throw new ConsumerException( file, "Error reading metadata file: " + e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( reader );
+ }
+
+ return model;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * GenericRepositoryMetadataConsumer - Consume any maven-metadata.xml files as {@link RepositoryMetadata} objects.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
public abstract class GenericRepositoryMetadataConsumer
    extends AbstractConsumer
    implements Consumer
{
    /**
     * Process a maven-metadata.xml file that was successfully parsed and classified.
     *
     * @param metadata the parsed repository metadata.
     * @param file the file the metadata was read from.
     */
    public abstract void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file );

    // Only maven-metadata.xml files are of interest to this consumer.
    private static final List includePatterns;

    static
    {
        includePatterns = new ArrayList();
        includePatterns.add( "**/maven-metadata.xml" );
    }

    public List getIncludePatterns()
    {
        return includePatterns;
    }

    public boolean isEnabled()
    {
        // the RepositoryMetadata objects only exist in 'default' layout repositories.
        ArtifactRepositoryLayout layout = repository.getLayout();
        return ( layout instanceof DefaultRepositoryLayout );
    }

    /**
     * Validate, parse and classify the metadata file, then hand it to
     * {@link #processRepositoryMetadata(RepositoryMetadata, BaseFile)}.
     *
     * @param file the maven-metadata.xml file to process.
     * @throws ConsumerException if the file is empty, unreadable, unparseable,
     *             or inconsistent with its location on disk.
     */
    public void processFile( BaseFile file )
        throws ConsumerException
    {
        if ( file.length() <= 0 )
        {
            throw new ConsumerException( file, "File is empty." );
        }

        if ( !file.canRead() )
        {
            throw new ConsumerException( file, "Not allowed to read file due to permission settings on file." );
        }

        RepositoryMetadata metadata = buildMetadata( file );
        processRepositoryMetadata( metadata, file );
    }

    /**
     * Read and parse the metadata file from disk, then classify it.
     *
     * @param metadataFile the metadata file to read.
     * @return the classified repository metadata, never null.
     * @throws ConsumerException if the file cannot be read, parsed, or classified.
     */
    private RepositoryMetadata buildMetadata( BaseFile metadataFile )
        throws ConsumerException
    {
        Metadata m;
        Reader reader = null;
        try
        {
            // NOTE(review): FileReader uses the platform default charset — confirm
            // metadata files are always compatible with it.
            reader = new FileReader( metadataFile );
            MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();

            m = metadataReader.read( reader );
        }
        catch ( XmlPullParserException e )
        {
            throw new ConsumerException( metadataFile, "Error parsing metadata file: " + e.getMessage(), e );
        }
        catch ( IOException e )
        {
            throw new ConsumerException( metadataFile, "Error reading metadata file: " + e.getMessage(), e );
        }
        finally
        {
            IOUtil.close( reader );
        }

        RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataFile );

        if ( repositoryMetadata == null )
        {
            throw new ConsumerException( metadataFile, "Unable to build a repository metadata from path." );
        }

        return repositoryMetadata;
    }

    /**
     * Builds a RepositoryMetadata object from a Metadata object and its path.
     *
     * Classification is based on the directory directly containing the file:
     * if it matches the metadata's version, this is snapshot metadata; if it
     * matches the artifactId, artifact-level metadata; otherwise the remaining
     * directories are reassembled into a groupId and compared.
     *
     * @param m Metadata
     * @param metadataFile file information
     * @return RepositoryMetadata if the parameters represent one; null if not
     * @throws ConsumerException if the metadata contents contradict the file's location.
     */
    private RepositoryMetadata buildMetadata( Metadata m, BaseFile metadataFile )
        throws ConsumerException
    {
        if ( artifactFactory == null )
        {
            throw new IllegalStateException( "Unable to build metadata with a null artifactFactory." );
        }

        String metaGroupId = m.getGroupId();
        String metaArtifactId = m.getArtifactId();
        String metaVersion = m.getVersion();

        // check if the groupId, artifactId and version is in the
        // metadataPath
        // parse the path, in reverse order
        List pathParts = new ArrayList();
        StringTokenizer st = new StringTokenizer( metadataFile.getRelativePath(), "/\\" );
        while ( st.hasMoreTokens() )
        {
            pathParts.add( st.nextToken() );
        }

        Collections.reverse( pathParts );
        // remove the metadata file
        pathParts.remove( 0 );
        Iterator it = pathParts.iterator();
        // tmpDir is now the directory that directly contained the metadata file.
        String tmpDir = (String) it.next();

        Artifact artifact = null;
        if ( StringUtils.isNotEmpty( metaVersion ) )
        {
            artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
        }

        // snapshotMetadata
        RepositoryMetadata metadata = null;
        if ( tmpDir != null && tmpDir.equals( metaVersion ) )
        {
            // containing directory is the version -> version-level (snapshot) metadata.
            if ( artifact != null )
            {
                metadata = new SnapshotArtifactRepositoryMetadata( artifact );
            }
        }
        else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
        {
            // artifactMetadata
            if ( artifact != null )
            {
                metadata = new ArtifactRepositoryMetadata( artifact );
            }
            else
            {
                // no version in the metadata; use a placeholder version to build the artifact.
                artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
                metadata = new ArtifactRepositoryMetadata( artifact );
            }
        }
        else
        {
            // Reassemble the remaining (reversed) directories into a dotted groupId.
            String groupDir = "";
            int ctr = 0;
            for ( it = pathParts.iterator(); it.hasNext(); )
            {
                String path = (String) it.next();
                if ( ctr == 0 )
                {
                    groupDir = path;
                }
                else
                {
                    groupDir = path + "." + groupDir;
                }
                ctr++;
            }

            // groupMetadata
            if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
            {
                metadata = new GroupRepositoryMetadata( metaGroupId );
            }
            else
            {
                /* If we reached this point, we have some bad metadata.
                 * We have a metadata file, with values for groupId / artifactId / version.
                 * But the information it is providing does not exist relative to the file location.
                 *
                 * See ${basedir}/src/test/repository/javax/maven-metadata.xml for example
                 */
                throw new ConsumerException( metadataFile,
                                             "Contents of metadata are not appropriate for its location on disk." );
            }
        }

        return metadata;
    }
}
--- /dev/null
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.net.URI;
+
+/**
+ * BaseFile - convenient File object that tracks the Base Directory and can provide relative path values
+ * for the file object based on that Base Directory value.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
public class BaseFile
    extends File
{
    // The base directory relative paths are computed against.
    private File baseDir;

    /**
     * NOTE(review): this delegates to {@link #BaseFile(String)}, so baseDir
     * becomes the filesystem root, not the file's parent — confirm intended.
     *
     * @param pathFile the file (its absolute path is used).
     */
    public BaseFile( File pathFile )
    {
        this( pathFile.getAbsolutePath() );
    }

    /**
     * @param repoDir the base directory.
     * @param pathFile a file expected to be inside repoDir (throws if not;
     *            see PathUtil.getRelative).
     */
    public BaseFile( File repoDir, File pathFile )
    {
        this( repoDir, PathUtil.getRelative( repoDir.getAbsolutePath(), pathFile ) );
    }

    /**
     * @param parent the base directory.
     * @param child the path of the file relative to parent.
     */
    public BaseFile( File parent, String child )
    {
        super( parent, child );
        this.baseDir = parent;
    }

    /**
     * @param pathname the file path; baseDir is set to the topmost ancestor
     *            (effectively the filesystem root for absolute paths).
     */
    public BaseFile( String pathname )
    {
        super( pathname );

        // Calculate the top level directory.

        File parent = this;
        while ( parent.getParentFile() != null )
        {
            parent = parent.getParentFile();
        }

        this.baseDir = parent;
    }

    /**
     * @param repoDir the base directory path.
     * @param pathFile a file expected to be inside repoDir.
     */
    public BaseFile( String repoDir, File pathFile )
    {
        this( new File( repoDir ), pathFile );
    }

    /**
     * @param parent the base directory path.
     * @param child the path of the file relative to parent.
     */
    public BaseFile( String parent, String child )
    {
        super( parent, child );
        this.baseDir = new File( parent );
    }

    /**
     * Unsupported — present only to satisfy the File superclass constructors.
     *
     * @throws IllegalStateException always.
     */
    public BaseFile( URI uri )
    {
        super( uri ); // only to satisfy java compiler.
        throw new IllegalStateException( "The " + BaseFile.class.getName()
            + " object does not support URI construction." );
    }

    public File getBaseDir()
    {
        return baseDir;
    }

    /**
     * @return this file's path relative to the base directory.
     */
    public String getRelativePath()
    {
        return PathUtil.getRelative( this.baseDir.getAbsolutePath(), this );
    }

    public void setBaseDir( File baseDir )
    {
        this.baseDir = baseDir;
    }

    public void setBaseDir( String repoDir )
    {
        setBaseDir( new File( repoDir ) );
    }
}
--- /dev/null
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+/**
+ * PathUtil - simple utility methods for path manipulation.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
public class PathUtil
{
    /**
     * Obtain the path of file relative to basedir.
     *
     * @param basedir the base directory path.
     * @param file the file to obtain the relative path for (absolute path is used).
     * @return the relative path, never starting with a separator.
     * @throws IllegalStateException if the file is not within basedir.
     */
    public static String getRelative( String basedir, File file )
    {
        return getRelative( basedir, file.getAbsolutePath() );
    }

    /**
     * Obtain the path of child relative to basedir.
     *
     * BUGFIX: the previous raw startsWith/substring implementation (1) threw
     * StringIndexOutOfBoundsException when child equals basedir, and (2)
     * treated "/repository/x" as inside "/repo", returning a garbage relative
     * path. The prefix match now must end on a path-separator boundary.
     *
     * @param basedir the base directory path.
     * @param child the full path of the child.
     * @return the relative path ("" if child equals basedir).
     * @throws IllegalStateException if child is not within basedir.
     */
    public static String getRelative( String basedir, String child )
    {
        // try the paths exactly as given first.
        String relative = relativeTo( basedir, child );
        if ( relative != null )
        {
            return relative;
        }

        // fall back to a resolved (absolute) form of basedir.
        String absoluteBasedir = new File( basedir ).getAbsolutePath();
        relative = relativeTo( absoluteBasedir, child );
        if ( relative != null )
        {
            return relative;
        }

        // File is not within basedir.
        throw new IllegalStateException( "Unable to obtain relative path of file " + child
            + ", it is not within basedir " + basedir + "." );
    }

    /**
     * Compute child relative to basedir, or null if child is not inside basedir.
     */
    private static String relativeTo( String basedir, String child )
    {
        if ( !child.startsWith( basedir ) )
        {
            return null;
        }

        if ( child.length() == basedir.length() )
        {
            // child IS the basedir.
            return "";
        }

        char boundary = child.charAt( basedir.length() );
        if ( boundary == '/' || boundary == '\\' )
        {
            return child.substring( basedir.length() + 1 );
        }

        // prefix matched but not on a directory boundary (e.g. "/repo" vs "/repository").
        return null;
    }
}
--- /dev/null
+package org.apache.maven.archiva.common;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * AbstractArchivaCommonTestCase
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractArchivaCommonTestCase
+ extends PlexusTestCase
+{
    /**
     * Obtain the test repository in 'legacy' layout, cleaned of leftover state.
     *
     * @return the legacy-layout test repository.
     * @throws Exception if the repository components could not be looked up.
     */
    protected ArtifactRepository getLegacyRepository()
        throws Exception
    {
        File repoBaseDir = new File( getBasedir(), "src/test/legacy-repository" );
        ArtifactRepository repository = createRepository( repoBaseDir, "legacy" );
        resetRepositoryState( repository );
        return repository;
    }
+
    /**
     * Obtain the test repository in 'default' layout, cleaned of leftover state.
     *
     * @return the default-layout test repository.
     * @throws Exception if the repository components could not be looked up.
     */
    protected ArtifactRepository getDefaultRepository()
        throws Exception
    {
        File repoBaseDir = new File( getBasedir(), "src/test/repository" );
        ArtifactRepository repository = createRepository( repoBaseDir, "default" );
        resetRepositoryState( repository );
        return repository;
    }
+
    /**
     * Remove state left behind in the repository by previous test runs.
     *
     * NOTE(review): the ".*" include pattern presumably selects dot-files in
     * the repository root — confirm against plexus FileUtils.getFiles pattern
     * semantics.
     *
     * @param repository the repository to clean.
     * @throws IOException if a file or directory could not be deleted.
     */
    private void resetRepositoryState( ArtifactRepository repository )
        throws IOException
    {
        File repoBaseDir = new File( repository.getBasedir() );

        List tmpfiles = FileUtils.getFiles( repoBaseDir, ".*", "" );
        for ( Iterator it = tmpfiles.iterator(); it.hasNext(); )
        {
            File hit = (File) it.next();
            if ( hit.exists() )
            {
                if ( hit.isFile() )
                {
                    hit.delete();
                }

                if ( hit.isDirectory() )
                {
                    FileUtils.deleteDirectory( hit );
                }
            }
        }
    }
+
    /**
     * Create an ArtifactRepository over a local directory with the named layout.
     *
     * @param basedir the repository base directory (turned into a file:// url).
     * @param layout the plexus role-hint of the layout ("default" or "legacy").
     * @return the repository, with id "discoveryRepo" and no snapshot/release policies.
     * @throws Exception if the factory or layout components could not be looked up.
     */
    protected ArtifactRepository createRepository( File basedir, String layout )
        throws Exception
    {
        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );

        ArtifactRepositoryLayout repoLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, layout );

        return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, repoLayout, null, null );
    }
+
+ public List getLegacyLayoutArtifactPaths()
+ {
+ List files = new ArrayList();
+
+ files.add( "invalid/jars/1.0/invalid-1.0.jar" );
+ files.add( "invalid/jars/invalid-1.0.rar" );
+ files.add( "invalid/jars/invalid.jar" );
+ files.add( "invalid/invalid-1.0.jar" );
+ files.add( "javax.sql/jars/jdbc-2.0.jar" );
+ files.add( "org.apache.maven/jars/some-ejb-1.0-client.jar" );
+ files.add( "org.apache.maven/jars/testing-1.0.jar" );
+ files.add( "org.apache.maven/jars/testing-1.0-sources.jar" );
+ files.add( "org.apache.maven/jars/testing-UNKNOWN.jar" );
+ files.add( "org.apache.maven/jars/testing-1.0.zip" );
+ files.add( "org.apache.maven/jars/testing-1.0-20050611.112233-1.jar" );
+ files.add( "org.apache.maven/jars/testing-1.0.tar.gz" );
+ files.add( "org.apache.maven.update/jars/test-not-updated-1.0.jar" );
+ files.add( "org.apache.maven.update/jars/test-updated-1.0.jar" );
+
+ return files;
+ }
+
+ public List getDefaultLayoutArtifactPaths()
+ {
+ List files = new ArrayList();
+
+ files.add( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
+ files.add( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
+ files.add( "invalid/invalid/1.0/invalid-1.0b.jar" );
+ files.add( "invalid/invalid/1.0/invalid-2.0.jar" );
+ files.add( "invalid/invalid-1.0.jar" );
+ files.add( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
+ files.add( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar" );
+ files.add( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar" );
+ files.add( "org/apache/maven/A/1.0/A-1.0.war" );
+ files.add( "org/apache/maven/A/1.0/A-1.0.pom" );
+ files.add( "org/apache/maven/B/2.0/B-2.0.pom" );
+ files.add( "org/apache/maven/B/1.0/B-1.0.pom" );
+ files.add( "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar" );
+ files.add( "org/apache/maven/C/1.0/C-1.0.war" );
+ files.add( "org/apache/maven/C/1.0/C-1.0.pom" );
+ files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom" );
+ files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar" );
+ files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom" );
+ files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar" );
+ files.add( "org/apache/maven/discovery/1.0/discovery-1.0.pom" );
+ files.add( "org/apache/maven/testing/1.0/testing-1.0-test-sources.jar" );
+ files.add( "org/apache/maven/testing/1.0/testing-1.0.jar" );
+ files.add( "org/apache/maven/testing/1.0/testing-1.0-sources.jar" );
+ files.add( "org/apache/maven/testing/1.0/testing-1.0.zip" );
+ files.add( "org/apache/maven/testing/1.0/testing-1.0.tar.gz" );
+ files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.pom" );
+ files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.jar" );
+ files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.pom" );
+ files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.jar" );
+ files.add( "org/apache/testgroup/discovery/1.0/discovery-1.0.pom" );
+ files.add( "javax/sql/jdbc/2.0/jdbc-2.0.jar" );
+
+ return files;
+ }
+
+ public List getDefaultLayoutMetadataPaths()
+ {
+ List files = new ArrayList();
+
+ files.add( "org/apache/maven/some-ejb/1.0/maven-metadata.xml" );
+ files.add( "org/apache/maven/update/test-not-updated/maven-metadata.xml" );
+ files.add( "org/apache/maven/update/test-updated/maven-metadata.xml" );
+ files.add( "org/apache/maven/maven-metadata.xml" );
+ files.add( "org/apache/testgroup/discovery/1.0/maven-metadata.xml" );
+ files.add( "org/apache/testgroup/discovery/maven-metadata.xml" );
+ files.add( "javax/sql/jdbc/2.0/maven-metadata-repository.xml" );
+ files.add( "javax/sql/jdbc/maven-metadata-repository.xml" );
+ files.add( "javax/sql/maven-metadata-repository.xml" );
+ files.add( "javax/maven-metadata.xml" );
+
+ return files;
+ }
+
+ public List getDefaultLayoutModelPaths()
+ {
+ List files = new ArrayList();
+
+ files.add( "org/apache/maven/A/1.0/A-1.0.pom" );
+ files.add( "org/apache/maven/B/2.0/B-2.0.pom" );
+ files.add( "org/apache/maven/B/1.0/B-1.0.pom" );
+ files.add( "org/apache/maven/C/1.0/C-1.0.pom" );
+ files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom" );
+ files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom" );
+ files.add( "org/apache/maven/discovery/1.0/discovery-1.0.pom" );
+ files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.pom" );
+ files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.pom" );
+ files.add( "org/apache/testgroup/discovery/1.0/discovery-1.0.pom" );
+
+ return files;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.common;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+    /**
+     * Aggregates the per-package sub-suites into one JUnit suite covering the
+     * whole org.apache.maven.archiva.common module.
+     */
+    public static Test suite()
+    {
+        TestSuite all = new TestSuite( "Test for org.apache.maven.archiva.common" );
+        //$JUnit-BEGIN$
+        all.addTest( org.apache.maven.archiva.common.artifact.builder.AllTests.suite() );
+        all.addTest( org.apache.maven.archiva.common.consumers.AllTests.suite() );
+        all.addTest( org.apache.maven.archiva.common.utils.AllTests.suite() );
+        //$JUnit-END$
+        return all;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.PlexusTestCase;
+
+/**
+ * AbstractLayoutArtifactBuilderTestCase
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractLayoutArtifactBuilderTestCase
+extends PlexusTestCase
+{
+
+    /**
+     * Assert that the given artifact is non-null and carries the expected
+     * coordinates.  The classifier is only compared when a non-blank expected
+     * classifier is supplied, so callers may pass null to skip that check.
+     *
+     * @param groupId expected groupId
+     * @param artifactId expected artifactId
+     * @param version expected version
+     * @param type expected type
+     * @param classifier expected classifier; null or blank skips the comparison
+     * @param artifact the artifact under test
+     */
+    protected void assertArtifact( String groupId, String artifactId, String version, String type, String classifier, Artifact artifact )
+    {
+        assertNotNull( "Artifact cannot be null.", artifact );
+
+        assertEquals( "Artifact groupId", groupId, artifact.getGroupId() );
+        assertEquals( "Artifact artifactId", artifactId, artifact.getArtifactId() );
+        assertEquals( "Artifact version", version, artifact.getVersion() );
+        assertEquals( "Artifact type", type, artifact.getType() );
+
+        if ( StringUtils.isNotBlank( classifier ) )
+        {
+            assertEquals( "Artifact classifier", classifier, artifact.getClassifier() );
+        }
+    }
+
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+    /**
+     * Builds the suite of layout artifact builder tests for this package.
+     */
+    public static Test suite()
+    {
+        // Fixed: the suite label previously read
+        // "Test for org.apache.maven.archiva.discoverer.builders", a leftover
+        // from the package these tests were moved out of; it now names the
+        // package actually under test.
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common.artifact.builder" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( LegacyLayoutArtifactBuilderTest.class );
+        suite.addTestSuite( DefaultLayoutArtifactBuilderTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * DefaultLayoutArtifactBuilderTest
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DefaultLayoutArtifactBuilderTest
+    extends AbstractLayoutArtifactBuilderTestCase
+{
+    // Component under test; looked up from the Plexus container in setUp().
+    LayoutArtifactBuilder builder;
+
+    /**
+     * Looks up the "default" layout implementation of LayoutArtifactBuilder.
+     */
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "default" );
+        assertNotNull( builder );
+    }
+
+    /**
+     * Releases the looked-up component back to the container.
+     */
+    protected void tearDown()
+        throws Exception
+    {
+        if ( builder != null )
+        {
+            release( builder );
+        }
+        super.tearDown();
+    }
+
+    // Distribution archives (.tar.gz / .zip) map to distribution-* types.
+    public void testPathDistributionArtifacts()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0.tar.gz" ) );
+
+        assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0.zip" ) );
+    }
+
+    // Ordinary jar paths, with and without a leading slash.
+    public void testPathNormal()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder
+            .build( "/org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) );
+
+        assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder
+            .build( "org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, builder.build( "javax/sql/jdbc/2.0/jdbc-2.0.jar" ) );
+
+    }
+
+    // Both base-version (1.0-SNAPSHOT) and timestamped snapshot filenames.
+    public void testPathSnapshots()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT", "jar", null, builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar" ) );
+
+        assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar" ) );
+    }
+
+    // Timestamped snapshot with a classifier suffix.
+    public void testPathSnapshotWithClassifier()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", builder
+            .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar" ) );
+    }
+
+    // Released artifact with a classifier suffix.
+    public void testPathWithClassifier()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", builder
+            .build( "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar" ) );
+    }
+
+    // "-sources" jars map to the java-source type with a sources classifier.
+    public void testPathWithJavaSourceInclusion()
+        throws BuilderException
+    {
+        assertArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", builder
+            .build( "org/apache/maven/testing/1.0/testing-1.0-sources.jar" ) );
+    }
+
+    // --- Failure cases: each asserts the exact BuilderException message. ---
+
+    // A filename with no extension cannot yield a type.
+    public void testProblemMissingType()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1/invalid-1" );
+            fail( "Should have detected missing type." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename does not have an extension.", e.getMessage() );
+        }
+    }
+
+    // A non-snapshot filename inside a -SNAPSHOT directory is rejected.
+    public void testProblemNonSnapshotInSnapshotDir()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
+            fail( "Non Snapshot artifact inside of an Snapshot dir is invalid." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", e.getMessage() );
+        }
+    }
+
+    // Fewer path segments than groupId/artifactId/version/file is rejected.
+    public void testProblemPathTooShort()
+    {
+        try
+        {
+            builder.build( "invalid/invalid-1.0.jar" );
+            fail( "Should have detected that path is too short." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path is too short to build an artifact from.", e.getMessage() );
+        }
+    }
+
+    // Timestamped snapshot filename in a non-SNAPSHOT directory is rejected.
+    public void testProblemTimestampSnapshotNotInSnapshotDir()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
+            fail( "Timestamped Snapshot artifact not inside of an Snapshot dir is invalid." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            // TODO: Is this really the right thing to do for this kind of artifact??
+            assertEquals( "Built snapshot artifact base version does not match path version: 1.0-SNAPSHOT; "
+                + "should have been version: 1.0-20050611.123456-1", e.getMessage() );
+        }
+    }
+
+    // Filename version differs from the directory version.
+    public void testProblemVersionPathMismatch()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0/invalid-2.0.jar" );
+            fail( "Should have detected version mismatch between path and artifact." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Built artifact version does not match path version", e.getMessage() );
+        }
+    }
+
+    // Filename version only partially matches the directory version (1.0b vs 1.0).
+    public void testProblemVersionPathMismatchAlt()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1.0/invalid-1.0b.jar" );
+            fail( "Should have version mismatch between directory and artifact." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path version does not corresspond to an artifact version", e.getMessage() );
+        }
+    }
+
+    // Filename artifactId differs from the directory artifactId.
+    public void testProblemWrongArtifactId()
+    {
+        try
+        {
+            builder.build( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
+            fail( "Should have detected wrong artifact Id." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename does not correspond to an artifact.", e.getMessage() );
+        }
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.common.artifact.builder;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+
+/**
+ * LegacyLayoutArtifactBuilderTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class LegacyLayoutArtifactBuilderTest
+    extends AbstractLayoutArtifactBuilderTestCase
+{
+    // Component under test; looked up from the Plexus container in setUp().
+    LayoutArtifactBuilder builder;
+
+    /**
+     * Looks up the "legacy" layout implementation of LayoutArtifactBuilder.
+     */
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "legacy" );
+        assertNotNull( builder );
+    }
+
+    /**
+     * Releases the looked-up component back to the container.
+     */
+    protected void tearDown()
+        throws Exception
+    {
+        if ( builder != null )
+        {
+            release( builder );
+        }
+        super.tearDown();
+    }
+
+    // Basic legacy form: groupId/type-dir/artifactId-version.ext
+    public void testPathNormal()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "javax.sql/jars/jdbc-2.0.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifact );
+    }
+
+    // A "-final-<date>" qualifier stays part of the version.
+    public void testPathFinal()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar" );
+
+        assertArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606", "jar", null, artifact );
+    }
+
+    // SNAPSHOT versions are recognized in the legacy layout.
+    public void testPathSnapshot()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar" );
+
+        assertArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT", "jar", null, artifact );
+    }
+
+    // "javadoc.jars" directory maps to the javadoc.jar type + javadoc classifier.
+    public void testPathJavadoc()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc", artifact );
+    }
+
+    // "java-sources" directory maps to the java-source type + sources classifier.
+    public void testPathSources()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "javax.sql/java-sources/jdbc-2.0-sources.jar" );
+
+        assertArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources", artifact );
+    }
+
+    // "plugins" directory maps to the plugin type.
+    public void testPathPlugin()
+        throws BuilderException
+    {
+        Artifact artifact = builder.build( "maven/plugins/maven-test-plugin-1.8.jar" );
+
+        assertArtifact( "maven", "maven-test-plugin", "1.8", "plugin", null, artifact );
+    }
+
+    // --- Failure cases: each asserts the exact BuilderException message. ---
+
+    // A filename with no extension does not match the legacy pattern.
+    public void testProblemNoType()
+    {
+        try
+        {
+            builder.build( "invalid/invalid/1/invalid-1" );
+
+            fail( "Should have detected no type." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path does not match a legacy repository path for an artifact", e.getMessage() );
+        }
+    }
+
+    // Extension (.md5) disagreeing with the type directory (jars) is rejected.
+    public void testProblemWrongArtifactPackaging()
+        throws ComponentLookupException
+    {
+        try
+        {
+            builder.build( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
+
+            fail( "Should have detected wrong package extension." );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path type does not match the extension", e.getMessage() );
+        }
+    }
+
+    // Filenames that leave no artifactId ("-1.0.jar", "1.0.jar") are rejected.
+    public void testProblemNoArtifactId()
+    {
+        try
+        {
+            builder.build( "groupId/jars/-1.0.jar" );
+
+            fail( "Should have detected artifactId is missing" );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename artifactId is empty", e.getMessage() );
+        }
+
+        try
+        {
+            builder.build( "groupId/jars/1.0.jar" );
+
+            fail( "Should have detected artifactId is missing" );
+        }
+        catch ( BuilderException e )
+        {
+            /* expected path */
+            assertEquals( "Path filename artifactId is empty", e.getMessage() );
+        }
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.AbstractArchivaCommonTestCase;
+
+/**
+ * AbstractGenericConsumerTestCase
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractGenericConsumerTestCase
+    extends AbstractArchivaCommonTestCase
+{
+    // Factory used by subclasses to create the consumer under test.
+    protected ConsumerFactory consumerFactory;
+
+    /**
+     * Starts the container (via super) then looks up the ConsumerFactory.
+     */
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        consumerFactory = (ConsumerFactory) lookup( ConsumerFactory.ROLE );
+    }
+
+    /**
+     * Releases the factory (when it was looked up) before the container is
+     * shut down by super.tearDown().
+     */
+    protected void tearDown()
+        throws Exception
+    {
+        if ( consumerFactory != null )
+        {
+            release( consumerFactory );
+        }
+        super.tearDown();
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+    /**
+     * Builds the JUnit suite of consumer tests for this package.
+     */
+    public static Test suite()
+    {
+        TestSuite consumerSuite = new TestSuite( "Test for org.apache.maven.archiva.common.consumers" );
+        //$JUnit-BEGIN$
+        consumerSuite.addTestSuite( GenericArtifactConsumerTest.class );
+        //$JUnit-END$
+        return consumerSuite;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * FileProblemsTracker
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+/**
+ * FileProblemsTracker - accumulates problem messages keyed by the
+ * repository-relative path of the file that caused them.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class FileProblemsTracker
+{
+    /** Map of relative path (String) to List of problem messages (String). */
+    private Map problemMap = new HashMap();
+
+    /**
+     * Record a problem message against the given file's relative path.
+     */
+    public void addProblem( BaseFile file, String message )
+    {
+        addProblem( file.getRelativePath(), message );
+    }
+
+    private void addProblem( String path, String message )
+    {
+        List messages = getProblems( path );
+        messages.add( message );
+        problemMap.put( path, messages );
+    }
+
+    /**
+     * Record the problem carried by a ConsumerException.  When the exception
+     * has no associated file, the message is filed under the "|fatal|"
+     * pseudo-path instead.
+     */
+    public void addProblem( ConsumerException e )
+    {
+        BaseFile file = e.getFile();
+
+        if ( file == null )
+        {
+            addProblem( "|fatal|", e.getMessage() );
+        }
+        else
+        {
+            addProblem( file, e.getMessage() );
+        }
+    }
+
+    /**
+     * @return true when at least one problem has been recorded for the path.
+     */
+    public boolean hasProblems( String path )
+    {
+        List messages = (List) problemMap.get( path );
+
+        return ( messages != null ) && !messages.isEmpty();
+    }
+
+    /**
+     * @return the set of paths that have problems recorded against them.
+     */
+    public Set getPaths()
+    {
+        return problemMap.keySet();
+    }
+
+    /**
+     * @return the problem messages recorded for the path, or a fresh empty
+     *         list when none exist yet (never null).
+     */
+    public List getProblems( String path )
+    {
+        List messages = (List) problemMap.get( path );
+
+        return ( messages != null ) ? messages : new ArrayList();
+    }
+
+    /**
+     * @return the total number of problem messages across all paths.
+     */
+    public int getProblemCount()
+    {
+        int total = 0;
+
+        for ( Iterator it = problemMap.values().iterator(); it.hasNext(); )
+        {
+            total += ( (List) it.next() ).size();
+        }
+
+        return total;
+    }
+
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * GenericArtifactConsumerTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class GenericArtifactConsumerTest
+    extends AbstractGenericConsumerTestCase
+{
+    // Create a fresh mock consumer via the factory (per-lookup component).
+    private MockArtifactConsumer getMockArtifactConsumer()
+        throws Exception
+    {
+        return (MockArtifactConsumer) consumerFactory.createConsumer( "mock-artifact" );
+    }
+
+    /**
+     * Feed every known legacy-layout path to the mock consumer, then verify
+     * the tracked problem count/messages and the number of artifacts built.
+     */
+    public void testScanLegacy()
+        throws Exception
+    {
+        ArtifactRepository repository = getLegacyRepository();
+        // NOTE(review): this local list only ever receives the mock consumer
+        // and is never consulted; the assertNotNull below is vacuous.
+        List consumers = new ArrayList();
+
+        MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+        mockConsumer.init( repository );
+
+        consumers.add( mockConsumer );
+
+        List files = getLegacyLayoutArtifactPaths();
+        for ( Iterator it = files.iterator(); it.hasNext(); )
+        {
+            String path = (String) it.next();
+            try
+            {
+                mockConsumer.processFile( new BaseFile( repository.getBasedir(), path ) );
+            }
+            catch ( ConsumerException e )
+            {
+                // Fold processing failures into the tracker so they can be asserted.
+                mockConsumer.getProblemsTracker().addProblem( e );
+            }
+        }
+
+        assertNotNull( consumers );
+
+        FileProblemsTracker tracker = mockConsumer.getProblemsTracker();
+
+        assertTracker( tracker, 16 );
+
+        assertHasFailureMessage( "Path does not match a legacy repository path for an artifact",
+                                 "invalid/invalid-1.0.jar", tracker );
+        assertHasFailureMessage( "Path filename version is empty", "invalid/jars/invalid.jar", tracker );
+        assertHasFailureMessage( "Path does not match a legacy repository path for an artifact",
+                                 "invalid/jars/1.0/invalid-1.0.jar", tracker );
+
+        assertEquals( 10, mockConsumer.getArtifactMap().size() );
+    }
+
+    /**
+     * Feed every known default-layout path to the mock consumer, then verify
+     * tracked problems, built artifacts, and that excluded dirs (CVS, .svn)
+     * never appear in any built artifact's file path.
+     */
+    public void testScanDefault()
+        throws Exception
+    {
+        ArtifactRepository repository = getDefaultRepository();
+        // NOTE(review): as in testScanLegacy, this list is write-only.
+        List consumers = new ArrayList();
+
+        MockArtifactConsumer mockConsumer = getMockArtifactConsumer();
+        mockConsumer.init( repository );
+
+        consumers.add( mockConsumer );
+
+        List files = getDefaultLayoutArtifactPaths();
+        for ( Iterator it = files.iterator(); it.hasNext(); )
+        {
+            String path = (String) it.next();
+            try
+            {
+                mockConsumer.processFile( new BaseFile( repository.getBasedir(), path ) );
+            }
+            catch ( ConsumerException e )
+            {
+                mockConsumer.getProblemsTracker().addProblem( e );
+            }
+        }
+
+        // Test gathered information from Mock consumer.
+
+        assertNotNull( consumers );
+
+        FileProblemsTracker tracker = mockConsumer.getProblemsTracker();
+
+        assertTracker( tracker, 21 );
+
+        assertHasFailureMessage( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime",
+                                 "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar", tracker );
+        assertHasFailureMessage( "Path is too short to build an artifact from.", "invalid/invalid-1.0.jar", tracker );
+        assertHasFailureMessage( "Built artifact version does not match path version",
+                                 "invalid/invalid/1.0/invalid-2.0.jar", tracker );
+
+        assertEquals( 25, mockConsumer.getArtifactMap().size() );
+
+        // Test for known include artifacts
+
+        Collection artifacts = mockConsumer.getArtifactMap().values();
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources", artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, artifacts );
+        assertHasArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, artifacts );
+        assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", artifacts );
+
+        // Test for known excluded files and dirs to validate exclusions.
+
+        Iterator it = mockConsumer.getArtifactMap().values().iterator();
+        while ( it.hasNext() )
+        {
+            Artifact a = (Artifact) it.next();
+            assertTrue( "Artifact " + a + " should have it's .getFile() set.", a.getFile() != null );
+            assertTrue( "Artifact " + a + " should have it's .getRepository() set.", a.getRepository() != null );
+            assertTrue( "Artifact " + a + " should have non-null repository url.", a.getRepository().getUrl() != null );
+            assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
+            assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
+        }
+    }
+
+    // Diagnostic dump of the tracker's contents, used only on count mismatch.
+    private void dumpProblems( FileProblemsTracker tracker )
+    {
+        int problemNum = 0;
+        System.out.println( "-- ProblemTracker dump -------------------------" );
+        for ( Iterator itPaths = tracker.getPaths().iterator(); itPaths.hasNext(); )
+        {
+            String path = (String) itPaths.next();
+            System.out.println( " [" + problemNum + "]: " + path );
+
+            int messageNum = 0;
+            for ( Iterator itProblems = tracker.getProblems( path ).iterator(); itProblems.hasNext(); )
+            {
+                String message = (String) itProblems.next();
+                System.out.println( " [" + messageNum + "]: " + message );
+                messageNum++;
+            }
+
+            problemNum++;
+        }
+    }
+
+    // Assert the tracker's total problem count; dump the tracker on mismatch
+    // so the failing run shows exactly which paths/messages were recorded.
+    private void assertTracker( FileProblemsTracker tracker, int expectedProblemCount )
+    {
+        assertNotNull( "ProblemsTracker should not be null.", tracker );
+
+        int actualProblemCount = tracker.getProblemCount();
+        if ( expectedProblemCount != actualProblemCount )
+        {
+            dumpProblems( tracker );
+            fail( "Problem count (across all paths) expected:<" + expectedProblemCount + ">, actual:<"
+                + actualProblemCount + ">" );
+        }
+    }
+
+    // Assert that the given exact message was tracked against the given path.
+    private void assertHasFailureMessage( String message, String path, FileProblemsTracker tracker )
+    {
+        if ( !tracker.hasProblems( path ) )
+        {
+            fail( "There are no messages for expected path [" + path + "]" );
+        }
+
+        assertTrue( "Unable to find message [" + message + "] in path [" + path + "]", tracker.getProblems( path )
+            .contains( message ) );
+    }
+
+    // Assert that an artifact with the exact coordinates exists in the collection.
+    private void assertHasArtifact( String groupId, String artifactId, String version, String type, String classifier,
+                                    Collection collection )
+    {
+        for ( Iterator it = collection.iterator(); it.hasNext(); )
+        {
+            Artifact artifact = (Artifact) it.next();
+            if ( StringUtils.equals( groupId, artifact.getGroupId() )
+                && StringUtils.equals( artifactId, artifact.getArtifactId() )
+                && StringUtils.equals( version, artifact.getVersion() )
+                && StringUtils.equals( type, artifact.getType() )
+                && StringUtils.equals( classifier, artifact.getClassifier() ) )
+            {
+                // Found it!
+                return;
+            }
+        }
+
+        fail( "Was unable to find artifact " + groupId + ":" + artifactId + ":" + version + ":" + type + ":"
+            + classifier );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.common.utils.PathUtil;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockArtifactConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers"
+ * role-hint="mock-artifact"
+ * instantiation-strategy="per-lookup"
+ */
+public class MockArtifactConsumer
+    extends GenericArtifactConsumer
+{
+    // Artifacts seen during processing, keyed by repository-relative path.
+    private Map artifactMap = new HashMap();
+
+    // Collects any file problems reported to this consumer.
+    private FileProblemsTracker problemsTracker = new FileProblemsTracker();
+
+    /**
+     * Records the processed artifact, keyed by the file's path relative to the repository basedir.
+     */
+    public void processArtifact( Artifact artifact, BaseFile file )
+    {
+        String relpath = PathUtil.getRelative( repository.getBasedir(), file );
+        artifactMap.put( relpath, artifact );
+    }
+
+    /**
+     * Forwards a reported file problem to the internal problems tracker.
+     */
+    public void processFileProblem( BaseFile file, String message )
+    {
+        problemsTracker.addProblem( file, message );
+    }
+
+    /**
+     * @return the map of relative path to {@link Artifact} captured so far
+     */
+    public Map getArtifactMap()
+    {
+        return artifactMap;
+    }
+
+    public String getName()
+    {
+        return "Mock Artifact Consumer (Testing Only)";
+    }
+
+    /**
+     * @return the tracker holding any problems reported during processing
+     */
+    public FileProblemsTracker getProblemsTracker()
+    {
+        return problemsTracker;
+    }
+}
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.model.Model;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockModelConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers"
+ * role-hint="mock-model"
+ * instantiation-strategy="per-lookup"
+ */
+public class MockModelConsumer
+    extends GenericModelConsumer
+{
+    // Models seen during processing, keyed by the file's relative path.
+    private Map modelMap = new HashMap();
+
+    // Collects any file problems reported to this consumer.
+    private FileProblemsTracker problemsTracker = new FileProblemsTracker();
+
+    /**
+     * Records the processed model, keyed by the file's relative path.
+     */
+    public void processModel( Model model, BaseFile file )
+    {
+        modelMap.put( file.getRelativePath(), model );
+    }
+
+    /**
+     * Forwards a reported file problem to the internal problems tracker.
+     */
+    public void processFileProblem( BaseFile file, String message )
+    {
+        problemsTracker.addProblem( file, message );
+    }
+
+    /**
+     * @return the map of relative path to {@link Model} captured so far
+     */
+    public Map getModelMap()
+    {
+        return modelMap;
+    }
+
+    public String getName()
+    {
+        return "Mock Model Consumer (Testing Only)";
+    }
+
+    /**
+     * @return the tracker holding any problems reported during processing
+     */
+    public FileProblemsTracker getProblemsTracker()
+    {
+        return problemsTracker;
+    }
+
+}
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva.common.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * MockRepositoryMetadataConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers"
+ * role-hint="mock-metadata"
+ * instantiation-strategy="per-lookup"
+ */
+public class MockRepositoryMetadataConsumer
+    extends GenericRepositoryMetadataConsumer
+{
+    // Metadata seen during processing, keyed by the file's relative path.
+    private Map repositoryMetadataMap = new HashMap();
+
+    // Collects any file problems reported to this consumer.
+    private FileProblemsTracker problemsTracker = new FileProblemsTracker();
+
+    /**
+     * Records the processed repository metadata, keyed by the file's relative path.
+     */
+    public void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file )
+    {
+        repositoryMetadataMap.put( file.getRelativePath(), metadata );
+    }
+
+    /**
+     * Forwards a reported file problem to the internal problems tracker.
+     */
+    public void processFileProblem( BaseFile file, String message )
+    {
+        problemsTracker.addProblem( file, message );
+    }
+
+    /**
+     * @return the map of relative path to {@link RepositoryMetadata} captured so far
+     */
+    public Map getRepositoryMetadataMap()
+    {
+        return repositoryMetadataMap;
+    }
+
+    public String getName()
+    {
+        return "Mock RepositoryMetadata Consumer (Testing Only)";
+    }
+
+    /**
+     * @return the tracker holding any problems reported during processing
+     */
+    public FileProblemsTracker getProblemsTracker()
+    {
+        return problemsTracker;
+    }
+}
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    /**
+     * Builds the aggregated suite of tests for the org.apache.maven.archiva.common.utils package.
+     *
+     * @return the combined {@link TestSuite}
+     */
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common.utils" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( PathUtilTest.class );
+        suite.addTestSuite( BaseFileTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
--- /dev/null
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+import junit.framework.TestCase;
+
+/**
+ * BaseFileTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class BaseFileTest
+    extends TestCase
+{
+    // Construct with (File baseDir, String relative path): relative path is preserved as given.
+    public void testFileString()
+    {
+        File repoDir = new File( "/home/user/foo/repository" );
+        String pathFile = "path/to/resource.xml";
+        BaseFile file = new BaseFile( repoDir, pathFile );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    // Construct with (File baseDir, File absolute path): relative path is derived from the base dir.
+    public void testFileFile()
+    {
+        File repoDir = new File( "/home/user/foo/repository" );
+        File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" );
+        BaseFile file = new BaseFile( repoDir, pathFile );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    // Construct with (String baseDir, File absolute path).
+    public void testStringFile()
+    {
+        String repoDir = "/home/user/foo/repository";
+        File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" );
+        BaseFile file = new BaseFile( repoDir, pathFile );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    // Construct with (File) then set the base dir afterwards from a String.
+    public void testFileThenSetBaseString()
+    {
+        String repoDir = "/home/user/foo/repository";
+        File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" );
+        BaseFile file = new BaseFile( pathFile );
+        file.setBaseDir( repoDir );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    // Construct with (File) then set the base dir afterwards from a File.
+    public void testFileThenSetBaseFile()
+    {
+        File repoDir = new File( "/home/user/foo/repository" );
+        File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" );
+        BaseFile file = new BaseFile( pathFile );
+        file.setBaseDir( repoDir );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    // Construct with (String) then set the base dir afterwards from a String.
+    public void testStringThenSetBaseString()
+    {
+        String repoDir = "/home/user/foo/repository";
+        String pathFile = "/home/user/foo/repository/path/to/resource.xml";
+        BaseFile file = new BaseFile( pathFile );
+        file.setBaseDir( repoDir );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+
+    // Construct with (String) then set the base dir afterwards from a File.
+    public void testStringThenSetBaseFile()
+    {
+        File repoDir = new File( "/home/user/foo/repository" );
+        String pathFile = "/home/user/foo/repository/path/to/resource.xml";
+        BaseFile file = new BaseFile( pathFile );
+        file.setBaseDir( repoDir );
+
+        assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() );
+        assertEquals( "path/to/resource.xml", file.getRelativePath() );
+        assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.common.utils;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.utils.PathUtil;
+
+import junit.framework.TestCase;
+
+/**
+ * PathUtilTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class PathUtilTest
+    extends TestCase
+{
+    // An absolute path under the base dir should be returned relative to it, without a leading slash.
+    public void testToRelative()
+    {
+        assertEquals( "path/to/resource.xml", PathUtil.getRelative( "/home/user/foo/repository",
+                                                                    "/home/user/foo/repository/path/to/resource.xml" ) );
+    }
+}
--- /dev/null
+not a real CVS root - for testing exclusions
--- /dev/null
+dummy content. sample file only.\r
--- /dev/null
+dummy content. sample file only.\r
--- /dev/null
+not a real CVS root - for testing exclusions
--- /dev/null
+test KEYS file
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<!-- This metadata is intentionally wrong. -->
+<metadata>
+ <groupId>javax.sql</groupId>
+ <artifactId>jdbc</artifactId>
+ <version>2.0</version>
+</metadata>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>javax.sql</groupId>
+ <artifactId>jdbc</artifactId>
+ <version>2.0</version>
+</metadata>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>javax.sql</groupId>
+ <artifactId>jdbc</artifactId>
+ <version>2.0</version>
+ <versioning>
+ <versions>
+ <version>2.0</version>
+ </versions>
+ </versioning>
+</metadata>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>javax.sql</groupId>
+ <artifactId>jdbc</artifactId>
+ <version>2.0</version>
+</metadata>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>A</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <packaging>war</packaging>
+</project>
--- /dev/null
+dummy content. sample file only.\r
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>B</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <packaging>pom</packaging>
+</project>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>B</artifactId>
+ <version>2.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <packaging>pom</packaging>
+</project>
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>C</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <packaging>war</packaging>
+</project>
--- /dev/null
+dummy content. sample file only.\r
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>discovery</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <packaging>pom</packaging>
+</project>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven</groupId>
+</metadata>
\ No newline at end of file
--- /dev/null
+dummy content. sample file only.\r
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>C</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <!-- default packaging is jar -->
+ <!--packaging>jar</packaging-->
+</project>
--- /dev/null
+dummy content. sample file only.\r
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>C</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <!-- specified packaging -->
+ <packaging>jar</packaging>
+</project>
--- /dev/null
+dummy content. sample file only.\r
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.update</groupId>
+ <artifactId>test-not-updated</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <!-- default packaging is jar -->
+ <!--packaging>jar</packaging-->
+</project>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.update</groupId>
+ <artifactId>test-not-updated</artifactId>
+</metadata>
\ No newline at end of file
--- /dev/null
+dummy content. sample file only.\r
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.update</groupId>
+ <artifactId>test-updated</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <!-- default packaging is jar -->
+ <!--packaging>jar</packaging-->
+</project>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.maven.update</groupId>
+ <artifactId>test-updated</artifactId>
+</metadata>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.testgroup</groupId>
+ <artifactId>discovery</artifactId>
+ <version>1.0</version>
+ <name>Maven Test Repository Artifact Discovery</name>
+ <packaging>pom</packaging>
+</project>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.testgroup</groupId>
+ <artifactId>discovery</artifactId>
+ <version>1.0</version>
+</metadata>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<metadata>
+ <groupId>org.apache.testgroup</groupId>
+ <artifactId>discovery</artifactId>
+</metadata>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.common.consumers.Consumer</role>
+ <role-hint>mock-artifact</role-hint>
+ <implementation>org.apache.maven.archiva.common.consumers.MockArtifactConsumer</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+ <role-hint>mock-model</role-hint>
+ <implementation>org.apache.maven.archiva.discoverer.consumers.MockModelConsumer</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.discoverer.DiscovererConsumer</role>
+ <role-hint>mock-metadata</role-hint>
+ <implementation>org.apache.maven.archiva.discoverer.consumers.MockRepositoryMetadataConsumer</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
</description>\r
</field>\r
<field>\r
- <name>indexerCronExpression</name>\r
+ <name>dataRefreshCronExpression</name>\r
<version>1.0.0</version>\r
<type>String</type>\r
- <description>When to run the indexing mechanism. Default is every 0 and 30 mins of any hour.</description>\r
+ <description>When to run the data refresh task. Default is every 30 mins (translated as every 0 and 30 minute reading of every hour)</description>\r
<defaultValue>0 0,30 * * * ?</defaultValue>\r
- </field>\r
+ </field>
+ <!--
<field>\r
<name>globalBlackListPatterns</name>\r
<version>1.0.0</version>\r
<type>String</type>\r
<multiplicity>*</multiplicity>\r
</association>\r
- </field>\r
+ </field>
+ -->\r
<field>\r
<name>proxy</name>\r
<version>1.0.0</version>\r
// check default configuration
assertNotNull( "check configuration returned", configuration );
assertEquals( "check configuration has default elements", "0 0,30 * * * ?",
- configuration.getIndexerCronExpression() );
+ configuration.getDataRefreshCronExpression() );
assertNull( "check configuration has default elements", configuration.getIndexPath() );
assertTrue( "check configuration has default elements", configuration.getRepositories().isEmpty() );
}
<artifactId>archiva-converter</artifactId>
<name>Archiva Repository Converter</name>
<dependencies>
+ <dependency>
+ <groupId>org.apache.maven.archiva</groupId>
+ <artifactId>archiva-discoverer</artifactId>
+ </dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-utils</artifactId>
<artifactId>maven-model-converter</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-reports-standard</artifactId>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-digest</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
+ <!-- TEST DEPS -->
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.7.3.3</version>
+ <scope>test</scope>
+ </dependency>
<!-- Needed for PlexusTestCase -->
<dependency>
<groupId>org.codehaus.plexus</groupId>
--- /dev/null
+package org.apache.maven.archiva.converter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+/**
+ * ConversionEvent
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ConversionEvent
+{
+ /** Event type: the whole-repository conversion process has started. */
+ public static final int STARTED = 0;
+
+ /** Event type: a specific artifact has been processed. */
+ public static final int PROCESSED = 1;
+
+ /** Event type: a warning was detected while converting a specific artifact. */
+ public static final int WARNING = 2;
+
+ /** Event type: an error occurred while processing an artifact. */
+ public static final int ERROR = 3;
+
+ /** Event type: the whole-repository conversion process has finished. */
+ public static final int FINISHED = 4;
+
+ // One of the event-type constants above; see getType() for semantics.
+ private int type;
+
+ // Optional human-readable detail; only set for WARNING/ERROR style events.
+ private String message;
+
+ // Artifact the event refers to; null for repository-level STARTED/FINISHED events.
+ private Artifact artifact;
+
+ // Repository the conversion is operating on; always set.
+ private ArtifactRepository repository;
+
+ // Underlying cause for ERROR events constructed from an exception; otherwise null.
+ private Exception exception;
+
+ /**
+ * Create a repository-level event (typically {@link #STARTED} or {@link #FINISHED}).
+ *
+ * @param repository the repository being converted.
+ * @param type the event type constant.
+ */
+ public ConversionEvent( ArtifactRepository repository, int type )
+ {
+ this.repository = repository;
+ this.type = type;
+ }
+
+ /**
+ * Create an artifact-level event with no message or exception
+ * (typically {@link #PROCESSED}).
+ *
+ * @param repository the repository being converted.
+ * @param type the event type constant.
+ * @param artifact the artifact the event refers to.
+ */
+ public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact )
+ {
+ this( repository, type );
+ this.artifact = artifact;
+ }
+
+ /**
+ * Create an artifact-level event carrying a human-readable message
+ * (typically {@link #WARNING} or {@link #ERROR}).
+ *
+ * @param repository the repository being converted.
+ * @param type the event type constant.
+ * @param artifact the artifact the event refers to.
+ * @param message the detail message describing the condition.
+ */
+ public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact, String message )
+ {
+ this( repository, type );
+ this.artifact = artifact;
+ this.message = message;
+ }
+
+ /**
+ * Create an artifact-level event carrying the exception that caused it
+ * (typically {@link #ERROR}).
+ *
+ * @param repository the repository being converted.
+ * @param type the event type constant.
+ * @param artifact the artifact the event refers to.
+ * @param exception the underlying cause of the event.
+ */
+ public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact, Exception exception )
+ {
+ this( repository, type );
+ this.artifact = artifact;
+ this.exception = exception;
+ }
+
+ /**
+ * @return the artifact this event refers to, or null for repository-level events.
+ */
+ public Artifact getArtifact()
+ {
+ return artifact;
+ }
+
+ /**
+ * @return the underlying exception, or null if this event was not caused by one.
+ */
+ public Exception getException()
+ {
+ return exception;
+ }
+
+ /**
+ * @return the detail message, or null if none was provided.
+ */
+ public String getMessage()
+ {
+ return message;
+ }
+
+ /**
+ * @return the repository the conversion is operating on.
+ */
+ public ArtifactRepository getRepository()
+ {
+ return repository;
+ }
+
+ /**
+ * <p>
+ * The type of event.
+ * </p>
+ *
+ * <p>
+ * Can be one of the following ...
+ * </p>
+ *
+ * <ul>
+ * <li>{@link #STARTED} - the whole repository conversion process has started.
+ * only seen when using the whole repository conversion technique with the
+ * {@link LegacyRepositoryConverter#convertLegacyRepository(java.io.File, java.io.File, java.util.List, boolean)}
+ * method.</li>
+ * <li>{@link #PROCESSED} - a specific artifact has been processed.</li>
+ * <li>{@link #WARNING} - a warning has been detected for a specific artifact during the conversion process.</li>
+ * <li>{@link #ERROR} - an error in the processing of an artifact has been detected.</li>
+ * <li>{@link #FINISHED} - the whole repository conversion process has finished.
+ * only seen when using the whole repository conversion technique with the
+ * {@link LegacyRepositoryConverter#convertLegacyRepository(java.io.File, java.io.File, java.util.List, boolean)}
+ * method.</li>
+ * </ul>
+ * @return the event type; one of {@link #STARTED}, {@link #PROCESSED},
+ * {@link #WARNING}, {@link #ERROR} or {@link #FINISHED}.
+ */
+ public int getType()
+ {
+ return type;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ConversionListener
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public interface ConversionListener
+{
+ /**
+ * Callback invoked for each event raised during the repository conversion
+ * process (started, processed, warning, error, finished).
+ *
+ * @param event the event that occurred; see ConversionEvent for the
+ * possible event types and their payloads.
+ */
+ public void conversionEvent( ConversionEvent event );
+}
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.maven.archiva.converter.transaction.FileTransaction;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.Model;
import org.apache.maven.model.Relocation;
-import org.apache.maven.model.converter.ArtifactPomRewriter;
import org.apache.maven.model.converter.ModelConverter;
import org.apache.maven.model.converter.PomTranslationException;
import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
import org.codehaus.plexus.digest.Digester;
import org.codehaus.plexus.digest.DigesterException;
import org.codehaus.plexus.i18n.I18N;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
+import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
* @plexus.component role="org.apache.maven.archiva.converter.RepositoryConverter" role-hint="default"
*/
public class DefaultRepositoryConverter
+ extends AbstractLogEnabled
implements RepositoryConverter
{
/**
*/
private ArtifactFactory artifactFactory;
- /**
- * @plexus.requirement
- */
- private ArtifactPomRewriter rewriter;
-
/**
* @plexus.requirement
*/
*/
private I18N i18n;
- public void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ private List listeners = new ArrayList();
+
+ public void convert( Artifact artifact, ArtifactRepository targetRepository )
throws RepositoryConversionException
{
if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) );
}
- if ( validateMetadata( artifact, reporter ) )
+ if ( validateMetadata( artifact ) )
{
FileTransaction transaction = new FileTransaction();
- if ( copyPom( artifact, targetRepository, reporter, transaction ) )
+ if ( copyPom( artifact, targetRepository, transaction ) )
{
- if ( copyArtifact( artifact, targetRepository, reporter, transaction ) )
+ if ( copyArtifact( artifact, targetRepository, transaction ) )
{
Metadata metadata = createBaseMetadata( artifact );
Versioning versioning = new Versioning();
versioning.addVersion( artifact.getBaseVersion() );
metadata.setVersioning( versioning );
- updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata,
- transaction );
+ updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata, transaction );
metadata = createBaseMetadata( artifact );
metadata.setVersion( artifact.getBaseVersion() );
Metadata newMetadata, FileTransaction transaction )
throws RepositoryConversionException
{
- File file = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File file = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
Metadata metadata;
boolean changed;
return metadata;
}
- private boolean validateMetadata( Artifact artifact, ReportingDatabase reporter )
+ private boolean validateMetadata( Artifact artifact )
throws RepositoryConversionException
{
ArtifactRepository repository = artifact.getRepository();
boolean result = true;
RepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact );
- File file =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
+ File file = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
if ( file.exists() )
{
Metadata metadata = readMetadata( file );
- result = validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+ result = validateMetadata( metadata, repositoryMetadata, artifact );
}
repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
if ( file.exists() )
{
Metadata metadata = readMetadata( file );
- result = result && validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+ result = result && validateMetadata( metadata, repositoryMetadata, artifact );
}
return result;
}
- private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
- ReportingDatabase reporter )
+ private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact )
{
String groupIdKey;
String artifactIdKey = null;
if ( metadata.getGroupId() == null || !metadata.getGroupId().equals( artifact.getGroupId() ) )
{
- addFailure( reporter, artifact, groupIdKey );
+ addFailure( artifact, groupIdKey );
result = false;
}
if ( !repositoryMetadata.storedInGroupDirectory() )
{
if ( metadata.getGroupId() == null || !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
{
- addFailure( reporter, artifact, artifactIdKey );
+ addFailure( artifact, artifactIdKey );
result = false;
}
if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
boolean foundVersion = false;
if ( metadata.getVersioning() != null )
{
- for ( Iterator i = metadata.getVersioning().getVersions().iterator();
- i.hasNext() && !foundVersion; )
+ for ( Iterator i = metadata.getVersioning().getVersions().iterator(); i.hasNext() && !foundVersion; )
{
String version = (String) i.next();
if ( version.equals( artifact.getBaseVersion() ) )
if ( !foundVersion )
{
- addFailure( reporter, artifact, versionsKey );
+ addFailure( artifact, versionsKey );
result = false;
}
}
// snapshot metadata
if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
{
- addFailure( reporter, artifact, versionKey );
+ addFailure( artifact, versionKey );
result = false;
}
if ( !correct )
{
- addFailure( reporter, artifact, snapshotKey );
+ addFailure( artifact, snapshotKey );
result = false;
}
}
return result;
}
- private void addFailure( ReportingDatabase reporter, Artifact artifact, String key )
+ private void addFailure( Artifact artifact, String key )
{
- addFailureWithReason( reporter, artifact, getI18NString( key ) );
-
+ addFailureWithReason( artifact, getI18NString( key ) );
}
- private static void addWarning( ReportingDatabase reporter, Artifact artifact, String message )
+ private void addWarning( Artifact artifact, String message )
{
// TODO: should we be able to identify/fix these?
- reporter.addWarning( artifact, null, null, message );
+ // TODO: write archiva-artifact-repair module
+ triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.WARNING, artifact,
+ message ) );
}
- private static void addFailureWithReason( ReportingDatabase reporter, Artifact artifact, String reason )
+ private void addFailureWithReason( Artifact artifact, String reason )
{
// TODO: should we be able to identify/fix these?
- reporter.addFailure( artifact, null, null, reason );
+ triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.ERROR, artifact, reason ) );
}
- private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
- FileTransaction transaction )
+ private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
throws RepositoryConversionException
{
- Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(),
- artifact.getVersion() );
+ Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact
+ .getVersion() );
pom.setBaseVersion( artifact.getBaseVersion() );
ArtifactRepository repository = artifact.getRepository();
File file = new File( repository.getBasedir(), repository.pathOf( pom ) );
boolean checksumsValid = false;
try
{
- if ( testChecksums( artifact, file, reporter ) )
+ if ( testChecksums( artifact, file ) )
{
checksumsValid = true;
}
if ( doRelocation( artifact, v3Model, targetRepository, transaction ) )
{
- Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(),
- artifact.getArtifactId(),
- artifact.getVersion() );
+ Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact
+ .getArtifactId(), artifact.getVersion() );
targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( relocatedPom ) );
}
Model v4Model = translator.translate( v3Model );
- translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(),
- v3Model.getVersion(), v3Model.getPackage() );
+ translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(), v3Model
+ .getVersion(), v3Model.getPackage() );
writer = new StringWriter();
MavenXpp3Writer Xpp3Writer = new MavenXpp3Writer();
for ( Iterator i = warnings.iterator(); i.hasNext(); )
{
String message = (String) i.next();
- addWarning( reporter, artifact, message );
+ addWarning( artifact, message );
}
}
catch ( XmlPullParserException e )
{
- addFailureWithReason( reporter, artifact,
- getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+ addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
result = false;
}
catch ( IOException e )
}
catch ( PomTranslationException e )
{
- addFailureWithReason( reporter, artifact,
- getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+ addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
result = false;
}
finally
}
else
{
- addWarning( reporter, artifact, getI18NString( "warning.missing.pom" ) );
+ addWarning( artifact, getI18NString( "warning.missing.pom" ) );
}
return result;
}
throws IOException
{
Properties properties = v3Model.getProperties();
- if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" ) ||
- properties.containsKey( "relocated.version" ) )
+ if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" )
+ || properties.containsKey( "relocated.version" ) )
{
String newGroupId = properties.getProperty( "relocated.groupId", v3Model.getGroupId() );
properties.remove( "relocated.groupId" );
return i18n.getString( getClass().getName(), Locale.getDefault(), key );
}
- private boolean testChecksums( Artifact artifact, File file, ReportingDatabase reporter )
+ private boolean testChecksums( Artifact artifact, File file )
throws IOException
{
boolean result = true;
{
Digester digester = (Digester) it.next();
result &= verifyChecksum( file, file.getName() + "." + getDigesterFileExtension( digester ), digester,
- reporter, artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
+ artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
}
return result;
}
return digester.getAlgorithm().toLowerCase().replaceAll( "-", "" );
}
- private boolean verifyChecksum( File file, String fileName, Digester digester, ReportingDatabase reporter,
- Artifact artifact, String key )
+ private boolean verifyChecksum( File file, String fileName, Digester digester, Artifact artifact, String key )
throws IOException
{
boolean result = true;
}
catch ( DigesterException e )
{
- addFailure( reporter, artifact, key );
+ addFailure( artifact, key );
result = false;
}
}
return result;
}
- private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
- FileTransaction transaction )
+ private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
throws RepositoryConversionException
{
File sourceFile = artifact.getFile();
matching = FileUtils.contentEquals( sourceFile, targetFile );
if ( !matching )
{
- addFailure( reporter, artifact, "failure.target.already.exists" );
+ addFailure( artifact, "failure.target.already.exists" );
result = false;
}
}
{
if ( force || !matching )
{
- if ( testChecksums( artifact, sourceFile, reporter ) )
+ if ( testChecksums( artifact, sourceFile ) )
{
transaction.copyFile( sourceFile, targetFile, digesters );
}
return result;
}
- public void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ public void convert( List artifacts, ArtifactRepository targetRepository )
throws RepositoryConversionException
{
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
try
{
- convert( artifact, targetRepository, reporter );
+ convert( artifact, targetRepository );
}
catch ( RepositoryConversionException e )
{
- // Need to add:
- // artifact
- // processor
- // problem
- // reason
- //TODO: this doesn't really provide any real facility for a decent error message, having
- // the stack trace would be useful. I also have no idea what a processor is currently or
- // how to get hold of it here.
-
- reporter.addFailure( artifact, "", e.getLocalizedMessage(), e.getCause().getLocalizedMessage() );
+ triggerConversionEvent( new ConversionEvent( targetRepository, ConversionEvent.ERROR, artifact, e ) );
+ }
+ }
+ }
+
+ /**
+ * Add a listener to the conversion process.
+ *
+ * @param listener the listener to add.
+ */
+ public void addConversionListener( ConversionListener listener )
+ {
+ listeners.add( listener );
+ }
+
+ /**
+ * Remove a listener from the conversion process.
+ *
+ * @param listener the listener to remove.
+ */
+ public void removeConversionListener( ConversionListener listener )
+ {
+ listeners.remove( listener );
+ }
+
+ private void triggerConversionEvent( ConversionEvent event )
+ {
+ Iterator it = listeners.iterator();
+ while ( it.hasNext() )
+ {
+ ConversionListener listener = (ConversionListener) it.next();
+
+ try
+ {
+ listener.conversionEvent( event );
+ }
+ catch ( Throwable t )
+ {
+ getLogger().warn( "ConversionEvent resulted in exception from listener: " + t.getMessage(), t );
}
}
}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
*
* @param artifact the artifact to convert
* @param targetRepository the target repository
- * @param reporter reporter to track the results of the conversion
*/
- void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ void convert( Artifact artifact, ArtifactRepository targetRepository )
throws RepositoryConversionException;
/**
*
* @param artifacts the set of artifacts to convert
* @param targetRepository the target repository
- * @param reporter reporter to track the results of the conversions
*/
- void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ void convert( List artifacts, ArtifactRepository targetRepository )
throws RepositoryConversionException;
+
+ /**
+ * Add a listener to the conversion process.
+ *
+ * @param listener the listener to add.
+ */
+ void addConversionListener( ConversionListener listener );
+
+ /**
+ * Remove a listener from the conversion process.
+ *
+ * @param listener the listener to remove.
+ */
+ void removeConversionListener( ConversionListener listener );
}
--- /dev/null
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.ConversionListener;
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Jason van Zyl
+ * @plexus.component
+ * @todo turn this into a general conversion component and hide all this crap here.
+ * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
+ */
+public class DefaultLegacyRepositoryConverter
+ implements LegacyRepositoryConverter
+{
+ /**
+ * @plexus.requirement role-hint="legacy"
+ */
+ private ArtifactRepositoryLayout legacyLayout;
+
+ /**
+ * @plexus.requirement role-hint="default"
+ */
+ private ArtifactRepositoryLayout defaultLayout;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactRepositoryFactory artifactRepositoryFactory;
+
+ /**
+ * @plexus.requirement role-hint="default"
+ */
+ private Discoverer discoverer;
+
+ /**
+ * @plexus.requirement role="org.apache.maven.archiva.common.consumers.Consumer" role-hint="legacy-converter"
+ */
+ private LegacyConverterArtifactConsumer legacyConverterConsumer;
+
+ public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
+ List fileExclusionPatterns, boolean includeSnapshots )
+ throws RepositoryConversionException
+ {
+ ArtifactRepository legacyRepository;
+
+ ArtifactRepository repository;
+
+ try
+ {
+ legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy", legacyRepositoryDirectory
+ .toURI().toURL().toString(), legacyLayout, null, null );
+
+ repository = artifactRepositoryFactory.createArtifactRepository( "default", repositoryDirectory.toURI()
+ .toURL().toString(), defaultLayout, null, null );
+ }
+ catch ( MalformedURLException e )
+ {
+ throw new RepositoryConversionException( "Error convering legacy repository.", e );
+ }
+
+ try
+ {
+ List consumers = new ArrayList();
+ legacyConverterConsumer.setDestinationRepository( repository );
+ consumers.add( legacyConverterConsumer );
+
+ discoverer.walkRepository( legacyRepository, consumers, includeSnapshots );
+ }
+ catch ( DiscovererException e )
+ {
+ throw new RepositoryConversionException( "Unable to convert repository due to discoverer error:"
+ + e.getMessage(), e );
+ }
+ }
+
+ /**
+ * Add a listener to the conversion process.
+ *
+ * @param listener the listener to add.
+ */
+ public void addConversionListener( ConversionListener listener )
+ {
+ legacyConverterConsumer.addConversionListener( listener );
+ }
+
+ /**
+ * Remove a listener from the conversion process.
+ *
+ * @param listener the listener to remove.
+ */
+ public void removeConversionListener( ConversionListener listener )
+ {
+ legacyConverterConsumer.removeConversionListener( listener );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.converter.ConversionListener;
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.converter.RepositoryConverter;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+/**
+ * LegacyConverterArtifactConsumer - convert artifacts as they are found
+ * into the destination repository.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers"
+ * role-hint="legacy-converter"
+ * instantiation-strategy="per-lookup"
+ */
+public class LegacyConverterArtifactConsumer
+ extends GenericArtifactConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryConverter repositoryConverter;
+
+ private ArtifactRepository destinationRepository;
+
+ public void processArtifact( Artifact artifact, BaseFile file )
+ {
+ try
+ {
+ repositoryConverter.convert( artifact, destinationRepository );
+ }
+ catch ( RepositoryConversionException e )
+ {
+ getLogger().error(
+ "Unable to convert artifact " + artifact + " to destination repository "
+ + destinationRepository, e );
+ }
+ }
+
+ public void processFileProblem( BaseFile path, String message )
+ {
+ getLogger().error( "Artifact Build Failure on " + path + " : " + message );
+ // TODO: report this to the ConversionListener?
+ }
+
+ public ArtifactRepository getDestinationRepository()
+ {
+ return destinationRepository;
+ }
+
+ public void setDestinationRepository( ArtifactRepository destinationRepository )
+ {
+ this.destinationRepository = destinationRepository;
+ }
+
+ public String getName()
+ {
+ return "Legacy Artifact Converter Consumer";
+ }
+
+ /**
+ * Add a listener to the conversion process.
+ *
+ * @param listener the listener to add.
+ */
+ public void addConversionListener( ConversionListener listener )
+ {
+ repositoryConverter.addConversionListener( listener );
+ }
+
+ /**
+ * Remove a listener from the conversion process.
+ *
+ * @param listener the listener to remove.
+ */
+ public void removeConversionListener( ConversionListener listener )
+ {
+ repositoryConverter.removeConversionListener( listener );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.legacy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.converter.ConversionListener;
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+
+import java.io.File;
+import java.util.List;
+
/**
 * Convert an entire repository from the Maven 1.x (legacy) layout to the
 * Maven 2.x (default) layout.
 *
 * @author Jason van Zyl
 */
public interface LegacyRepositoryConverter
{
    /** Component role; by convention the fully-qualified name of this interface. */
    String ROLE = LegacyRepositoryConverter.class.getName();

    /**
     * Convert a legacy repository to a modern repository. This means a Maven 1.x repository
     * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs.
     *
     * @param legacyRepositoryDirectory the directory of the legacy repository.
     * @param destinationRepositoryDirectory the directory of the modern repository.
     * @param fileExclusionPatterns the list of patterns to exclude from the conversion.
     * @param includeSnapshots true to include snapshots in conversion or not.
     * @throws RepositoryConversionException if the repository cannot be converted.
     */
    void convertLegacyRepository( File legacyRepositoryDirectory, File destinationRepositoryDirectory,
                                  List fileExclusionPatterns, boolean includeSnapshots )
        throws RepositoryConversionException;

    /**
     * Add a listener to the conversion process.
     *
     * @param listener the listener to add.
     */
    void addConversionListener( ConversionListener listener );

    /**
     * Remove a listener from the conversion process.
     *
     * @param listener the listener to remove.
     */
    void removeConversionListener( ConversionListener listener );
}
--- /dev/null
+package org.apache.maven.archiva.converter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+ public static Test suite()
+ {
+ TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.converter" );
+ //$JUnit-BEGIN$
+ suite.addTest( org.apache.maven.archiva.converter.transaction.AllTests.suite() );
+ suite.addTestSuite( RepositoryConverterTest.class );
+ //$JUnit-END$
+ return suite;
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.converter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * MockConversionListener
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MockConversionListener
+ implements ConversionListener
+{
+ private Map warnings = new HashMap();
+
+ private Map errors = new HashMap();
+
+ private Map exceptions = new HashMap();
+
+ private List processed = new ArrayList();
+
+ private List repositories = new ArrayList();
+
+ public void conversionEvent( ConversionEvent event )
+ {
+ switch ( event.getType() )
+ {
+ case ConversionEvent.STARTED:
+ addUnique( repositories, event.getRepository() );
+ break;
+ case ConversionEvent.PROCESSED:
+ addUnique( processed, event.getArtifact() );
+ break;
+ case ConversionEvent.WARNING:
+ if ( event.getException() != null )
+ {
+ addObjectList( exceptions, toKey( event.getArtifact() ), event.getException() );
+ }
+
+ if ( event.getMessage() != null )
+ {
+ addObjectList( warnings, toKey( event.getArtifact() ), event.getMessage() );
+ }
+ break;
+ case ConversionEvent.ERROR:
+ if ( event.getException() != null )
+ {
+ addObjectList( exceptions, toKey( event.getArtifact() ), event.getException() );
+ }
+
+ if ( event.getMessage() != null )
+ {
+ addObjectList( errors, toKey( event.getArtifact() ), event.getMessage() );
+ }
+ break;
+ case ConversionEvent.FINISHED:
+ addUnique( repositories, event.getRepository() );
+ break;
+ }
+ }
+
+ public String toKey( Artifact artifact )
+ {
+ return StringUtils.defaultString( artifact.getGroupId() ) + ":"
+ + StringUtils.defaultString( artifact.getArtifactId() ) + ":"
+ + StringUtils.defaultString( artifact.getVersion() ) + ":" + StringUtils.defaultString( artifact.getType() )
+ + ":" + StringUtils.defaultString( artifact.getClassifier() );
+ }
+
+ private void addObjectList( Map map, String key, Object value )
+ {
+ List objlist = (List) map.get( key );
+ if ( objlist == null )
+ {
+ objlist = new ArrayList();
+ }
+
+ objlist.add( value );
+
+ map.put( key, objlist );
+ }
+
+ private void addUnique( Collection collection, Object obj )
+ {
+ if ( !collection.contains( obj ) )
+ {
+ collection.add( obj );
+ }
+ }
+
+ public Map getErrors()
+ {
+ return errors;
+ }
+
+ public Map getExceptions()
+ {
+ return exceptions;
+ }
+
+ public List getProcessed()
+ {
+ return processed;
+ }
+
+ public List getRepositories()
+ {
+ return repositories;
+ }
+
+ public Map getWarnings()
+ {
+ return warnings;
+ }
+
+ private int getObjectListCount( Map map )
+ {
+ int count = 0;
+ for ( Iterator it = map.values().iterator(); it.hasNext(); )
+ {
+ List objList = (List) it.next();
+ count += objList.size();
+ }
+ return count;
+ }
+
+ public int getWarningMessageCount()
+ {
+ return getObjectListCount( warnings );
+ }
+
+ public int getErrorMessageCount()
+ {
+ return getObjectListCount( errors );
+ }
+}
*/
import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ArtifactMetadata;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
import java.util.regex.Matcher;
/**
private ArtifactFactory artifactFactory;
- private ReportingDatabase reportingDatabase;
-
private static final int SLEEP_MILLIS = 100;
private I18N i18n;
ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
File sourceBase = getTestFile( "src/test/source-repository" );
- sourceRepository =
- factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
+ sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null,
+ null );
layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
File targetBase = getTestFile( "target/test-target-repository" );
copyDirectoryStructure( getTestFile( "src/test/target-repository" ), targetBase );
- targetRepository =
- factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null, null );
+ targetRepository = factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null,
+ null );
repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "default" );
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
i18n = (I18N) lookup( I18N.ROLE );
+ }
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
    /**
     * Release container/test fixture state after each test; no converter-specific
     * cleanup is needed beyond the superclass behavior.
     */
    protected void tearDown()
        throws Exception
    {
        super.tearDown();
    }
private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
{
if ( !destination.exists() && !destination.mkdirs() )
{
- throw new IOException(
- "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
+ throw new IOException( "Could not create destination directory '"
+ + destination.getAbsolutePath() + "'." );
}
copyDirectoryStructure( file, destination );
}
Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
artifactFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkSuccess(listener);
assertTrue( "Check artifact created", artifactFile.exists() );
assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkSuccess(listener);
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
{
Artifact artifact = createArtifact( "test", "relocated-v3artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
//checkSuccess(); --> commented until MNG-2100 is fixed
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check if relocated artifact created", artifactFile.exists() );
- assertTrue( "Check if relocated artifact matches",
- FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+ assertTrue( "Check if relocated artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
Artifact pomArtifact = createArtifact( "relocated-test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
File pomFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( pomArtifact ) );
File testFile = getTestFile( "target/test-target-repository/" + targetRepository.pathOf( pomArtifact ) );
Artifact artifact = createArtifact( "test", "v3-warnings-artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
- assertEquals( "check number of warnings", 2, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 2 );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
Artifact artifact = createArtifact( "test", "v4artifact", version );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-SNAPSHOT" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
public void testMavenOnePluginConversion()
throws Exception
{
- Artifact artifact =
- createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0", "maven-plugin" );
- artifact.setFile(
- new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ Artifact artifact = createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0",
+ "maven-plugin" );
+ artifact.setFile( new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
+ repositoryConverter.convert( artifact, targetRepository );
// There is a warning but I can't figure out how to look at it. Eyeballing the results it appears
// the plugin is being coverted correctly.
//checkSuccess();
assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
/*
- The POM isn't needed for Maven 1.x plugins but the raw conversion for
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
- compareFiles( expectedPomFile, pomFile );
- */
+ The POM isn't needed for Maven 1.x plugins but the raw conversion for
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" );
+ assertTrue( "Check POM created", pomFile.exists() );
+ compareFiles( expectedPomFile, pomFile );
+ */
}
public void testV3TimestampedSnapshotPomConvert()
Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-20060105.130101-3" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
// test that a POM is not created when there was none at the source
Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
- assertEquals( "check warnings", 1, reportingDatabase.getNumWarnings() );
- assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 1 );
+
+ assertHasWarningReason( listener, getI18nString( "warning.missing.pom" ) );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.md5" ), getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 1, 0 );
+
+ assertHasErrorReason( listener, getI18nString( "failure.incorrect.md5" ) );
assertFalse( "Check artifact not created", file.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.sha1" ), getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 1, 0 );
+
+ assertHasErrorReason( listener, getI18nString( "failure.incorrect.sha1" ) );
assertFalse( "Check artifact not created", file.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
compareFiles( sourceFile, targetFile );
compareFiles( sourcePomFile, targetPomFile );
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
- getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 1, 0 );
+
+ assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) );
assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() );
sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
compareFiles( sourceFile, targetFile );
compareFiles( sourcePomFile, targetPomFile );
assertFalse( "Check modified", origTime == targetPomFile.lastModified() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertTrue( "Check metadata created", metadataFile.exists() );
}
File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
assertTrue( "Check source file exists", sourceFile.exists() );
assertTrue( "Check source POM exists", sourcePomFile.exists() );
assertFalse( "Check target POM doesn't exist", targetPomFile.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
- getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkFailure( listener );
+
+ assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) );
assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
Artifact artifact = createArtifact( "test", "rollback-created-artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
FileUtils.deleteDirectory( artifactMetadataFile.getParentFile() );
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 1, 0 );
+
+ List messages = (List) listener.getErrors().get( listener.toKey( artifact ) );
+ assertNotNull( "Should have error messages.", messages );
+
+ boolean found = false;
String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$";
- assertTrue( "Check failure message", getFailure().getReason().matches( pattern ) );
+ for ( Iterator it = messages.iterator(); it.hasNext(); )
+ {
+ String reason = (String) it.next();
+ if( reason.matches( pattern ) )
+ {
+ found = true;
+ break;
+ }
+ }
+
+ assertTrue( "Check failure message.", found );
assertFalse( "check artifact rolled back", artifactFile.exists() );
assertFalse( "check metadata rolled back", artifactMetadataFile.exists() );
artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) );
artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) );
artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) );
- repositoryConverter.convert( artifacts, targetRepository, reportingDatabase );
- assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
- assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifacts, targetRepository );
+ checkCounts( listener, 0, 0 );
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
{
artifact = createPomArtifact( artifact );
File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile =
- getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId() + ".pom" );
+ File expectedPomFile = getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId()
+ + ".pom" );
assertTrue( "Check POM created", pomFile.exists() );
compareFiles( expectedPomFile, pomFile );
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.artifactMetadata.versions" ),
- getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkFailure( listener );
+
+ assertHasErrorReason( listener, getI18nString( "failure.incorrect.artifactMetadata.versions" ) );
assertFalse( "Check artifact not created", file.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ),
- getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkFailure( listener );
+
+ assertHasErrorReason( listener, getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ) );
assertFalse( "Check artifact not created", file.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
// test artifact level metadata is merged when it already exists on successful conversion
Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" );
-
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkSuccess( listener );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
compareFiles( sourcePomFile, pomFile );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
File expectedMetadataFile = getTestFile( "src/test/expected-files/newversion-artifact-metadata.xml" );
ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
- sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(),
- targetRepository.getLayout(), null, null );
+ sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(), targetRepository
+ .getLayout(), null, null );
Artifact artifact = createArtifact( "test", "repository-artifact", "1.0" );
try
{
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
fail( "Should have failed trying to convert within the same repository" );
}
catch ( RepositoryConversionException e )
return createArtifact( groupId, artifactId, baseVersion, version, "jar" );
}
- private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
- String type )
+ private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version, String type )
{
Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
artifact.setBaseVersion( baseVersion );
private Artifact createPomArtifact( Artifact artifact )
{
- return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(),
- artifact.getVersion(), "pom" );
+ return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(), artifact
+ .getVersion(), "pom" );
}
private static void compareFiles( File expectedPomFile, File pomFile )
{
String expectedContent = normalizeString( FileUtils.readFileToString( expectedPomFile, null ) );
String targetContent = normalizeString( FileUtils.readFileToString( pomFile, null ) );
- assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent,
- targetContent );
+ assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent, targetContent );
}
private static String normalizeString( String path )
return path.trim().replaceAll( "\r\n", "\n" ).replace( '\r', '\n' ).replaceAll( "<\\?xml .+\\?>", "" );
}
- private void checkSuccess()
+ private void checkSuccess( MockConversionListener listener )
{
- assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
- assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ checkCounts( listener, 0, 0 );
}
- private void checkFailure()
+ private void checkFailure( MockConversionListener listener )
{
- assertEquals( "check num errors", 1, reportingDatabase.getNumFailures() );
- assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ checkCounts( listener, 1, 0 );
+ }
+
+ private void checkCounts( MockConversionListener listener, int failures, int warnings )
+ {
+ int actualFailures = listener.getErrorMessageCount();
+ int actualWarnings = listener.getWarningMessageCount();
+
+ if ( ( failures != actualFailures ) || ( warnings != actualWarnings ) )
+ {
+ fail( "Check Results Counts expected:<" + failures + "," + warnings + "> but was:<" + actualFailures + ","
+ + actualWarnings + ">" );
+ }
}
private String getI18nString( String key )
{
return i18n.getString( repositoryConverter.getClass().getName(), Locale.getDefault(), key );
}
-
- private Result getFailure()
+
+ private void assertHasWarningReason( MockConversionListener listener, String reason )
{
- ArtifactResults artifact = (ArtifactResults) reportingDatabase.getArtifactIterator().next();
- return (Result) artifact.getFailures().get( 0 );
+ assertHasMessage( listener.getWarnings(), "warning", reason );
}
- private Result getWarning()
+ private void assertHasErrorReason( MockConversionListener listener, String reason )
{
- ArtifactResults artifact = (ArtifactResults) reportingDatabase.getArtifactIterator().next();
- return (Result) artifact.getWarnings().get( 0 );
+ assertHasMessage( listener.getErrors(), "error", reason );
+ }
+
+ private void assertHasMessage( Map map, String type, String message )
+ {
+ if ( ( map == null ) || ( map.isEmpty() ) )
+ {
+ fail( "No " + type + "s captured, expected " + type + " <" + message + ">" );
+ }
+
+ // Attempt to find the message ...
+ for ( Iterator it = map.values().iterator(); it.hasNext(); )
+ {
+ List msgList = (List) it.next();
+
+ if ( msgList.contains( message ) )
+ {
+ // Found it!
+ return;
+ }
+ }
+
+ // Didn't find it! whoops ...
+ for ( Iterator it = map.entrySet().iterator(); it.hasNext(); )
+ {
+ Map.Entry entry = (Map.Entry) it.next();
+ String key = (String) entry.getKey();
+ List msgList = (List) entry.getValue();
+
+ System.err.println( " Artifact: " + key );
+
+ for ( Iterator itMsgs = msgList.iterator(); itMsgs.hasNext(); )
+ {
+ String msg = (String) itMsgs.next();
+ System.err.println( " " + msg );
+ }
+ }
+
+ fail( "Unable to find " + type + " reason <" + message + "> in any artifact." );
}
private void createModernSourceRepository()
ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
File sourceBase = getTestFile( "src/test/source-modern-repository" );
- sourceRepository =
- factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
+ sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null,
+ null );
}
}
--- /dev/null
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+ public static Test suite()
+ {
+ TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.converter.transaction" );
+ //$JUnit-BEGIN$
+ suite.addTestSuite( CreateFileEventTest.class );
+ suite.addTestSuite( CopyFileEventTest.class );
+ //$JUnit-END$
+ return suite;
+ }
+
+}
--- /dev/null
+# Set root logger level to DEBUG and its only appender to A1.
+log4j.rootLogger=WARN, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
- -->
+-->
<component-set>
<components>
+
<component>
<role>org.apache.maven.archiva.converter.RepositoryConverter</role>
<implementation>org.apache.maven.archiva.converter.DefaultRepositoryConverter</implementation>
<role>org.apache.maven.artifact.factory.ArtifactFactory</role>
<field-name>artifactFactory</field-name>
</requirement>
- <requirement>
- <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
- <field-name>rewriter</field-name>
- </requirement>
<requirement>
<role>org.codehaus.plexus.i18n.I18N</role>
<field-name>i18n</field-name>
<role>org.apache.maven.artifact.factory.ArtifactFactory</role>
<field-name>artifactFactory</field-name>
</requirement>
- <requirement>
- <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
- <field-name>rewriter</field-name>
- </requirement>
<requirement>
<role>org.codehaus.plexus.i18n.I18N</role>
<field-name>i18n</field-name>
</requirement>
</requirements>
</component>
+
+
+ <component>
+ <role>org.codehaus.plexus.jdo.JdoFactory</role>
+ <role-hint>archiva</role-hint>
+ <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+
+ <configuration>
+ <!-- Database Configuration -->
+ <driverName>org.hsqldb.jdbcDriver</driverName>
+ <url>jdbc:hsqldb:mem:TESTDB</url>
+ <userName>sa</userName>
+ <password></password>
+
+ <!-- JPOX and JDO configuration -->
+ <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+ <otherProperties>
+ <property>
+ <name>javax.jdo.PersistenceManagerFactoryClass</name>
+ <value>org.jpox.PersistenceManagerFactoryImpl</value>
+ </property>
+ <property>
+ <name>org.jpox.autoCreateSchema</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>org.jpox.validateTables</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateConstraints</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateColumns</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.autoStartMechanism</name>
+ <value>None</value>
+ </property>
+ <property>
+ <name>org.jpox.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.poid.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.rdbms.dateTimezone</name>
+ <value>JDK_DEFAULT_TIMEZONE</value>
+ </property>
+ </otherProperties>
+ </configuration>
+ </component>
</components>
</component-set>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-configuration</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-converter</artifactId>
- </dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-discoverer</artifactId>
<version>1.0-alpha-1</version>
<scope>test</scope>
</dependency>
+ <!-- TEST DEPS -->
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.7.3.3</version>
+ <scope>test</scope>
+ </dependency>
<!-- needed for PlexusTestCase -->
<dependency>
<groupId>org.codehaus.plexus</groupId>
<build>
<plugins>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>cobertura-maven-plugin</artifactId>
- <!-- TODO! add unit tests -->
- <configuration>
- <instrumentation>
- <excludes>
- <exclude>**/**</exclude>
- </excludes>
- </instrumentation>
- </configuration>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>merge</id>
+ <goals>
+ <goal>merge-descriptors</goal>
+ </goals>
+ <configuration>
+ <descriptors>
+ <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+ <descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </execution>
+ </executions>
</plugin>
</plugins>
</build>
+++ /dev/null
-package org.apache.maven.archiva.artifact;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * ManagedArtifact
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ManagedArtifact
-{
- private String repositoryId;
-
- private Artifact artifact;
-
- private String path;
-
- protected Map attached;
-
- public ManagedArtifact( String repoId, Artifact artifact, String path )
- {
- super();
- this.repositoryId = repoId;
- this.artifact = artifact;
- this.path = path;
- this.attached = new HashMap();
- }
-
- public Artifact getArtifact()
- {
- return artifact;
- }
-
- public String getPath()
- {
- return path;
- }
-
- public String getRepositoryId()
- {
- return repositoryId;
- }
-
- public Map getAttached()
- {
- return attached;
- }
-
- public void setAttached( Map attached )
- {
- this.attached = attached;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.artifact;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * ManagedArtifactTypes - provides place to test an unknown artifact type.
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ManagedArtifactTypes
-{
- public static final int GENERIC = 0;
-
- public static final int JAVA = 1;
-
- public static final int EJB = 2;
-
- private static List javaArtifacts;
-
- private static List ejbArtifacts;
-
- static
- {
- javaArtifacts = new ArrayList();
- javaArtifacts.add( "jar" );
- javaArtifacts.add( "war" );
- javaArtifacts.add( "sar" );
- javaArtifacts.add( "rar" );
- javaArtifacts.add( "ear" );
-
- ejbArtifacts = new ArrayList();
- ejbArtifacts.add( "ejb" );
- ejbArtifacts.add( "ejb-client" );
- }
-
- public static int whichType( String type )
- {
- if ( StringUtils.isBlank( type ) )
- {
- // TODO: is an empty type even possible?
- return GENERIC;
- }
-
- type = type.toLowerCase();
-
- if ( ejbArtifacts.contains( type ) )
- {
- return EJB;
- }
-
- if ( javaArtifacts.contains( type ) )
- {
- return JAVA;
- }
-
- return GENERIC;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.artifact;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-/**
- * ManagedEjbArtifact - adds the ability to reference the ejb-client jar too.
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ManagedEjbArtifact
- extends ManagedJavaArtifact
-{
- public static final String CLIENT = "client";
-
- public ManagedEjbArtifact( String repoId, Artifact artifact, String path )
- {
- super( repoId, artifact, path );
- }
-
- public String getClientPath()
- {
- return (String) super.attached.get( CLIENT );
- }
-
- public void setClientPath( String clientPath )
- {
- super.attached.put( CLIENT, clientPath );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.artifact;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-/**
- * ManagedJavaArtifact - a ManagedArtifact with optional javadoc and source
- * reference jars.
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ManagedJavaArtifact
- extends ManagedArtifact
-{
- public static final String JAVADOC = "javadoc";
-
- public static final String SOURCES = "sources";
-
- public ManagedJavaArtifact( String repoId, Artifact artifact, String path )
- {
- super( repoId, artifact, path );
- }
-
- public String getJavadocPath()
- {
- return (String) super.attached.get( JAVADOC );
- }
-
- public void setJavadocPath( String javadocPath )
- {
- super.attached.put( JAVADOC, javadocPath );
- }
-
- public String getSourcesPath()
- {
- return (String) super.attached.get( SOURCES );
- }
-
- public void setSourcesPath( String sourcesPath )
- {
- super.attached.put( SOURCES, sourcesPath );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.InvalidArtifactRTException;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
+
+import java.util.Collections;
+
+/**
+ * ArtifactHealthConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer"
+ * role-hint="artifact-health"
+ * instantiation-strategy="per-lookup"
+ */
+public class ArtifactHealthConsumer
+ extends GenericArtifactConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
+ /**
+ * @plexus.requirement role-hint="health"
+ */
+ private ReportGroup health;
+
+ /**
+ * @plexus.requirement
+ */
+ private MavenProjectBuilder projectBuilder;
+
+ public void processArtifact( Artifact artifact, BaseFile file )
+ {
+ Model model = null;
+ try
+ {
+ Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact
+ .getArtifactId(), artifact.getVersion() );
+ MavenProject project = projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
+
+ model = project.getModel();
+ }
+ catch ( InvalidArtifactRTException e )
+ {
+ database.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
+ }
+ catch ( ProjectBuildingException e )
+ {
+ database.addWarning( artifact, null, null, "Error reading project model: " + e );
+ }
+
+ database.remove( artifact );
+ health.processArtifact( artifact, model );
+ }
+
+ public void processFileProblem( BaseFile path, String message )
+ {
+ /* do nothing here (yet) */
+ // TODO: store build failure into database?
+ }
+
+ public String getName()
+ {
+ return "Artifact Health Consumer";
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexer.RepositoryIndexException;
+import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+
+/**
+ * IndexArtifactConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer"
+ * role-hint="index-artifact"
+ * instantiation-strategy="per-lookup"
+ */
+public class IndexArtifactConsumer
+ extends GenericArtifactConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryArtifactIndexFactory indexFactory;
+
+ /**
+ * @plexus.requirement role-hint="standard"
+ */
+ private RepositoryIndexRecordFactory recordFactory;
+
+ /**
+ * Configuration store.
+ *
+ * @plexus.requirement
+ */
+ private ArchivaConfiguration archivaConfiguration;
+
+ private RepositoryArtifactIndex index;
+
+ public boolean init( ArtifactRepository repository )
+ {
+ Configuration configuration = archivaConfiguration.getConfiguration();
+
+ File indexPath = new File( configuration.getIndexPath() );
+
+ index = indexFactory.createStandardIndex( indexPath );
+
+ return super.init( repository );
+ }
+
+ public void processArtifact( Artifact artifact, BaseFile file )
+ {
+ try
+ {
+ index.indexArtifact( artifact, recordFactory );
+ }
+ catch ( RepositoryIndexException e )
+ {
+ getLogger().warn( "Unable to index artifact " + artifact, e );
+ }
+ }
+
+ public void processFileProblem( BaseFile path, String message )
+ {
+
+ }
+
+ public String getName()
+ {
+ return "Index Artifact Consumer";
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.consumers;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.GenericRepositoryMetadataConsumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
+import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+/**
+ * RepositoryMetadataHealthConsumer
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer"
+ * role-hint="metadata-health"
+ * instantiation-strategy="per-lookup"
+ */
+public class RepositoryMetadataHealthConsumer
+ extends GenericRepositoryMetadataConsumer
+{
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResultsDatabase database;
+
+ /**
+ * @plexus.requirement role-hint="health"
+ */
+ private ReportGroup health;
+
+ public void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file )
+ {
+ MetadataResults results = database.getMetadataResults( metadata );
+ database.clearResults( results );
+
+ health.processMetadata( metadata, repository );
+ }
+
+ public void processFileProblem( BaseFile path, String message )
+ {
+
+ }
+
+ public String getName()
+ {
+ return "RepositoryMetadata Health Consumer";
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.conversion;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.converter.RepositoryConverter;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStore;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- * @plexus.component
- * @todo turn this into a general conversion component and hide all this crap here.
- * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
- */
-public class DefaultLegacyRepositoryConverter
- implements LegacyRepositoryConverter
-{
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private ArtifactDiscoverer artifactDiscoverer;
-
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private ArtifactRepositoryLayout legacyLayout;
-
- /**
- * @plexus.requirement role-hint="default"
- */
- private ArtifactRepositoryLayout defaultLayout;
-
- /**
- * @plexus.requirement
- */
- private ArtifactRepositoryFactory artifactRepositoryFactory;
-
- /**
- * @plexus.requirement
- */
- private RepositoryConverter repositoryConverter;
-
- /**
- * @plexus.requirement
- */
- private ReportingStore reportingStore;
-
- /**
- * @plexus.requirement role-hint="health"
- */
- private ReportGroup reportGroup;
-
- public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
- List blacklistedPatterns, boolean includeSnapshots )
- throws RepositoryConversionException, DiscovererException
- {
- ArtifactRepository legacyRepository;
-
- ArtifactRepository repository;
-
- try
- {
- legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy",
- legacyRepositoryDirectory.toURI().toURL().toString(),
- legacyLayout, null, null );
-
- repository = artifactRepositoryFactory.createArtifactRepository( "default",
- repositoryDirectory.toURI().toURL().toString(),
- defaultLayout, null, null );
- }
- catch ( MalformedURLException e )
- {
- throw new RepositoryConversionException( "Error convering legacy repository.", e );
- }
-
- ArtifactFilter filter =
- includeSnapshots ? new AcceptAllArtifactFilter() : (ArtifactFilter) new SnapshotArtifactFilter();
- List legacyArtifacts = artifactDiscoverer.discoverArtifacts( legacyRepository, blacklistedPatterns, filter );
-
- ReportingDatabase reporter;
- try
- {
- reporter = reportingStore.getReportsFromStore( repository, reportGroup );
-
- repositoryConverter.convert( legacyArtifacts, repository, reporter );
-
- reportingStore.storeReports( reporter, repository );
- }
- catch ( ReportingStoreException e )
- {
- throw new RepositoryConversionException( "Error convering legacy repository.", e );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.conversion;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- */
-public interface LegacyRepositoryConverter
-{
- String ROLE = LegacyRepositoryConverter.class.getName();
-
- /**
- * Convert a legacy repository to a modern repository. This means a Maven 1.x repository
- * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs.
- *
- * @param legacyRepositoryDirectory
- * @param repositoryDirectory
- * @throws org.apache.maven.archiva.converter.RepositoryConversionException
- *
- */
- void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, List blacklistedPatterns,
- boolean includeSnapshots )
- throws RepositoryConversionException, DiscovererException;
-}
* under the License.
*/
-import org.apache.maven.archiva.artifact.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
/**
* ActiveManagedRepositories
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public interface ActiveManagedRepositories
* @param id the ID of the repository.
* @return the ArtifactRepository associated with the provided ID, or null if none found.
*/
- ArtifactRepository getArtifactRepository( String id );
+ public ArtifactRepository getArtifactRepository( String id );
- List getAllArtifactRepositories();
+ /**
+ * Get the List of active managed repositories as a List of {@link ArtifactRepository} objects.
+ *
+ * @return the list of ArtifactRepository objects.
+ */
+ public List /*<ArtifactRepository>*/getAllArtifactRepositories();
RepositoryConfiguration getRepositoryConfiguration( String id );
+    /**
+     * Providing only a groupId, artifactId, and version, return the MavenProject that
+     * is found in any managed repository.
+ *
+ * @param groupId the groupId to search for
+ * @param artifactId the artifactId to search for
+ * @param version the version to search for
+ * @return the MavenProject from the provided parameters.
+ * @throws ProjectBuildingException if there was a problem building the maven project object.
+ */
MavenProject findProject( String groupId, String artifactId, String version )
throws ProjectBuildingException;
ManagedArtifact findArtifact( String groupId, String artifactId, String version, String type );
ManagedArtifact findArtifact( Artifact artifact );
+
+ /**
+ * Obtain the last data refresh timestamp for all Managed Repositories.
+ *
+ * @return the last data refresh timestamp.
+ */
+ long getLastDataRefreshTime();
+
+ /**
+     * Tests to see if there needs to be a data refresh performed.
+     *
+     * The only scenario that triggers a refresh is when one or more repositories have never had their data refreshed.
+ *
+ * @return true if there needs to be a data refresh.
+ */
+ boolean needsDataRefresh();
}
*/
import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.artifact.ManagedArtifact;
-import org.apache.maven.archiva.artifact.ManagedArtifactTypes;
-import org.apache.maven.archiva.artifact.ManagedEjbArtifact;
-import org.apache.maven.archiva.artifact.ManagedJavaArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifactTypes;
+import org.apache.maven.archiva.common.artifact.managed.ManagedEjbArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedJavaArtifact;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
/**
* DefaultActiveManagedRepositories
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
* @plexus.component role="org.apache.maven.archiva.repositories.ActiveManagedRepositories"
*/
repositories = repositoryFactory.createRepositories( this.configuration );
localRepository = repositoryFactory.createLocalRepository( this.configuration );
+
}
private ManagedArtifact createManagedArtifact( ArtifactRepository repository, Artifact artifact, File f )
{
if ( propertyName.startsWith( "repositories" ) || propertyName.startsWith( "localRepository" ) )
{
- getLogger().debug( "Triggering managed repository configuration change with " + propertyName + " set to " +
- propertyValue );
+ getLogger().debug(
+ "Triggering managed repository configuration change with " + propertyName + " set to "
+ + propertyValue );
configureSelf( archivaConfiguration.getConfiguration() );
}
else
getLogger().debug( "Not triggering managed repository configuration change with " + propertyName );
}
}
+
+ public long getLastDataRefreshTime()
+ {
+ long lastDataRefreshTime = 0;
+
+ for ( Iterator i = getAllArtifactRepositories().iterator(); i.hasNext(); )
+ {
+ ArtifactRepository repository = (ArtifactRepository) i.next();
+
+ DiscovererStatistics stats = new DiscovererStatistics( repository );
+ if ( stats.getTimestampFinished() > lastDataRefreshTime )
+ {
+ lastDataRefreshTime = stats.getTimestampFinished();
+ }
+ }
+
+ return lastDataRefreshTime;
+ }
+
+ public boolean needsDataRefresh()
+ {
+ for ( Iterator i = getAllArtifactRepositories().iterator(); i.hasNext(); )
+ {
+ ArtifactRepository repository = (ArtifactRepository) i.next();
+
+ DiscovererStatistics stats = new DiscovererStatistics( repository );
+ if ( stats.getTimestampFinished() <= 0 )
+ {
+                // Found a repository that has NEVER had its data walked.
+ return true;
+ }
+ }
+
+ return false;
+ }
}
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
import org.apache.maven.archiva.scheduler.task.RepositoryTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
import org.quartz.JobDetail;
import org.quartz.SchedulerException;
-import java.io.File;
import java.text.ParseException;
/**
private Scheduler scheduler;
/**
- * @plexus.requirement role-hint="indexer"
+ * @plexus.requirement role-hint="data-refresh"
*/
- private TaskQueue indexerQueue;
-
- /**
- * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
- */
- private IndexerTaskExecutor indexerTaskExecutor;
+ private TaskQueue datarefreshQueue;
/**
* @plexus.requirement
*/
private ArchivaConfiguration archivaConfiguration;
-
+
/**
* @plexus.requirement
*/
- private RepositoryArtifactIndexFactory indexFactory;
+ private ActiveManagedRepositories activeRepositories;
private static final String DISCOVERER_GROUP = "DISCOVERER";
- private static final String INDEXER_JOB = "indexerTask";
+ private static final String DATA_REFRESH_JOB = "dataRefreshTask";
public void start()
throws StartingException
try
{
- scheduleJobs( configuration.getIndexPath(), configuration.getIndexerCronExpression() );
+ scheduleJobs( configuration.getDataRefreshCronExpression() );
}
catch ( ParseException e )
{
- throw new StartingException( "Invalid configuration: " + configuration.getIndexerCronExpression(), e );
+ throw new StartingException( "Invalid configuration: " + configuration.getDataRefreshCronExpression(), e );
}
catch ( SchedulerException e )
{
}
}
- private void scheduleJobs( String indexPath, String indexerCronExpression )
+ private void scheduleJobs( String indexerCronExpression )
throws ParseException, SchedulerException
{
- if ( indexPath != null )
- {
- JobDetail jobDetail = createJobDetail( INDEXER_JOB );
+ JobDetail jobDetail = createJobDetail( DATA_REFRESH_JOB );
- getLogger().info( "Scheduling indexer: " + indexerCronExpression );
- CronTrigger trigger = new CronTrigger( INDEXER_JOB + "Trigger", DISCOVERER_GROUP, indexerCronExpression );
- scheduler.scheduleJob( jobDetail, trigger );
+ getLogger().info( "Scheduling data-refresh: " + indexerCronExpression );
+ CronTrigger trigger = new CronTrigger( DATA_REFRESH_JOB + "Trigger", DISCOVERER_GROUP, indexerCronExpression );
+ scheduler.scheduleJob( jobDetail, trigger );
- try
- {
- queueNowIfNeeded();
- }
- catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e )
- {
- getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
- }
+ try
+ {
+ queueNowIfNeeded();
}
- else
+ catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e )
{
- getLogger().info( "Not scheduling indexer - index path is not configured" );
+ getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
}
}
JobDetail jobDetail = new JobDetail( jobName, DISCOVERER_GROUP, RepositoryTaskJob.class );
JobDataMap dataMap = new JobDataMap();
- dataMap.put( RepositoryTaskJob.TASK_QUEUE, indexerQueue );
+ dataMap.put( RepositoryTaskJob.TASK_QUEUE, datarefreshQueue );
dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, RepositoryTask.QUEUE_POLICY_SKIP );
jobDetail.setJobDataMap( dataMap );
{
try
{
- scheduler.unscheduleJob( INDEXER_JOB, DISCOVERER_GROUP );
+ scheduler.unscheduleJob( DATA_REFRESH_JOB, DISCOVERER_GROUP );
}
catch ( SchedulerException e )
{
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
- if ( "indexPath".equals( propertyName ) || "indexerCronExpression".equals( propertyName ) )
+ if ( "dataRefreshCronExpression".equals( propertyName ) )
{
getLogger().debug( "Restarting task scheduler with new configuration after property change: " +
propertyName + " to " + propertyValue );
try
{
Configuration configuration = archivaConfiguration.getConfiguration();
- scheduleJobs( configuration.getIndexPath(), configuration.getIndexerCronExpression() );
+ scheduleJobs( configuration.getDataRefreshCronExpression() );
}
catch ( ParseException e )
{
}
}
- public void runIndexer()
- throws org.apache.maven.archiva.scheduler.TaskExecutionException
+ public void runDataRefresh()
+ throws TaskExecutionException
{
- IndexerTask task = new IndexerTask();
- task.setJobName( "INDEX_INIT" );
+ DataRefreshTask task = new DataRefreshTask();
+ task.setJobName( "DATA_REFRESH_INIT" );
try
{
- indexerQueue.put( task );
+ datarefreshQueue.put( task );
}
catch ( TaskQueueException e )
{
- throw new org.apache.maven.archiva.scheduler.TaskExecutionException( e.getMessage(), e );
+ throw new TaskExecutionException( e.getMessage(), e );
}
}
public void queueNowIfNeeded()
- throws org.codehaus.plexus.taskqueue.execution.TaskExecutionException
+ throws TaskExecutionException
{
- Configuration configuration = archivaConfiguration.getConfiguration();
-
- File indexPath = new File( configuration.getIndexPath() );
-
- try
+ if ( activeRepositories.needsDataRefresh() )
{
- RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
- if ( !artifactIndex.exists() )
- {
- runIndexer();
- }
- }
- catch ( RepositoryIndexException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- catch ( org.apache.maven.archiva.scheduler.TaskExecutionException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
+ runDataRefresh();
}
}
* under the License.
*/
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
import org.apache.maven.archiva.scheduler.task.RepositoryTask;
import org.codehaus.plexus.scheduler.AbstractJob;
import org.codehaus.plexus.taskqueue.TaskQueue;
JobDataMap dataMap = context.getJobDetail().getJobDataMap();
setJobDataMap( dataMap );
- TaskQueue indexerQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
+ TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
- RepositoryTask task = new IndexerTask();
+ RepositoryTask task = new DataRefreshTask();
task.setJobName( context.getJobDetail().getName() );
try
{
- if ( indexerQueue.getQueueSnapshot().size() == 0 )
+ if ( taskQueue.getQueueSnapshot().size() == 0 )
{
- indexerQueue.put( task );
+ taskQueue.put( task );
}
else
{
if ( RepositoryTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) )
{
- indexerQueue.put( task );
+ taskQueue.put( task );
}
else if ( RepositoryTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) )
{
- //do not queue anymore, policy is to skip
+ // do not queue anymore, policy is to skip
}
}
}
* under the License.
*/
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+
/**
* The component that takes care of scheduling in the application.
*
*/
String ROLE = RepositoryTaskScheduler.class.getName();
- void runIndexer()
+ void runDataRefresh()
throws TaskExecutionException;
-}
+}
+++ /dev/null
-package org.apache.maven.archiva.scheduler;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Exception occurring during task execution.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class TaskExecutionException
- extends Exception
-{
- public TaskExecutionException( String message, Throwable t )
- {
- super( message, t );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Mutable list of consumers for the Data Refresh.
+ *
+ * NOTE: This class only exists to minimize the requirements of manual component management.
+ * This approach allows for a small and simple component definition in the application.xml
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers"
+ */
+public class DataRefreshConsumers
+{
+ /**
+ * @plexus.configuration
+ */
+ private List consumerNames;
+
+ public List getConsumerNames()
+ {
+ if ( consumerNames == null )
+ {
+ consumerNames = new ArrayList();
+ consumerNames.add( "index-artifact" );
+ consumerNames.add( "artifact-health" );
+ consumerNames.add( "metadata-health" );
+ }
+
+ return consumerNames;
+ }
+
+ public Iterator iterator()
+ {
+ return getConsumerNames().iterator();
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.Consumer;
+import org.apache.maven.archiva.common.consumers.ConsumerException;
+import org.apache.maven.archiva.common.consumers.ConsumerFactory;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.discoverer.Discoverer;
+import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.taskqueue.Task;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * DataRefreshExecutor
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
+ * role-hint="data-refresh"
+ */
+public class DataRefreshExecutor
+ extends AbstractLogEnabled
+ implements TaskExecutor
+{
+ /**
+ * Configuration store.
+ *
+ * @plexus.requirement
+ */
+ private ArchivaConfiguration archivaConfiguration;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfiguredRepositoryFactory repoFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private DataRefreshConsumers consumerNames;
+
+ /**
+ * @plexus.requirement
+ */
+ private Discoverer discoverer;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConsumerFactory consumerFactory;
+
+ public void executeTask( Task task )
+ throws TaskExecutionException
+ {
+ DataRefreshTask indexerTask = (DataRefreshTask) task;
+
+ getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
+
+ execute();
+ }
+
+ public void execute()
+ throws TaskExecutionException
+ {
+ Configuration configuration = archivaConfiguration.getConfiguration();
+
+ List consumers = new ArrayList();
+
+ for ( Iterator it = consumerNames.iterator(); it.hasNext(); )
+ {
+ String name = (String) it.next();
+ try
+ {
+ Consumer consumer = consumerFactory.createConsumer( name );
+ consumers.add( consumer );
+ }
+ catch ( ConsumerException e )
+ {
+ getLogger().warn( e.getMessage(), e );
+ throw new TaskExecutionException( e.getMessage(), e );
+ }
+ }
+
+ long time = System.currentTimeMillis();
+
+ for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
+ {
+ RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
+
+ if ( !repositoryConfiguration.isIndexed() )
+ {
+ continue;
+ }
+
+ ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
+
+ List filteredConsumers = filterConsumers( consumers, repository );
+
+ DiscovererStatistics lastRunStats = new DiscovererStatistics( repository );
+ try
+ {
+ lastRunStats.load( ".datarefresh" );
+ }
+ catch ( IOException e )
+ {
+ getLogger().info(
+ "Unable to load last run statistics for repository [" + repository.getId() + "]: "
+ + e.getMessage() );
+ }
+
+ try
+ {
+ DiscovererStatistics stats = discoverer
+ .walkRepository( repository, filteredConsumers, repositoryConfiguration.isIncludeSnapshots(),
+ lastRunStats.getTimestampFinished(), null, null );
+
+ stats.dump( getLogger() );
+ }
+ catch ( DiscovererException e )
+ {
+ getLogger().error(
+ "Unable to run data refresh against repository [" + repository.getId() + "]: "
+ + e.getMessage(), e );
+ }
+ }
+
+ time = System.currentTimeMillis() - time;
+
+ getLogger().info( "Finished data refresh process in " + time + "ms." );
+ }
+
+ /**
+ * Not all consumers work with all repositories.
+ * This will filter out those incompatible consumers based on the provided repository.
+ *
+ * @param consumers the initial list of consumers.
+ * @param repository the repository to test consumers against.
+ * @return the filtered list of consumers.
+ */
+ private List filterConsumers( List consumers, ArtifactRepository repository )
+ {
+ List filtered = new ArrayList();
+
+ for ( Iterator it = consumers.iterator(); it.hasNext(); )
+ {
+ Consumer consumer = (Consumer) it.next();
+ if ( consumer.init( repository ) )
+ {
+ // Approved!
+ filtered.add( consumer );
+ }
+ else
+ {
+ getLogger().info( "Disabling consumer [" + consumer.getName() + "] for repository " + repository );
+ }
+ }
+
+ return filtered;
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.scheduler.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.archiva.configuration.RepositoryConfiguration;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter;
-import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.executor.ReportExecutor;
-import org.apache.maven.archiva.reporting.filter.ReportingMetadataFilter;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.taskqueue.Task;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Edwin Punzalan
- * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
- */
-public class IndexerTaskExecutor
- extends AbstractLogEnabled
- implements TaskExecutor
-{
- /**
- * Configuration store.
- *
- * @plexus.requirement
- */
- private ArchivaConfiguration archivaConfiguration;
-
- /**
- * @plexus.requirement
- */
- private RepositoryArtifactIndexFactory indexFactory;
-
- /**
- * @plexus.requirement
- */
- private ConfiguredRepositoryFactory repoFactory;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
- */
- private Map artifactDiscoverers;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
- */
- private Map metadataDiscoverers;
-
- /**
- * @plexus.requirement role-hint="standard"
- */
- private RepositoryIndexRecordFactory recordFactory;
-
- /**
- * @plexus.requirement
- */
- private ReportExecutor reportExecutor;
-
- /**
- * @plexus.requirement role-hint="health"
- */
- private ReportGroup reportGroup;
-
- private long lastIndexingTime = 0;
-
- private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
- public long getLastIndexingTime()
- {
- return lastIndexingTime;
- }
-
- public void executeTask( Task task )
- throws TaskExecutionException
- {
- IndexerTask indexerTask = (IndexerTask) task;
-
- getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
-
- execute();
- }
-
- public void execute()
- throws TaskExecutionException
- {
- Configuration configuration = archivaConfiguration.getConfiguration();
-
- File indexPath = new File( configuration.getIndexPath() );
-
- execute( configuration, indexPath );
- }
-
- public void executeNowIfNeeded()
- throws TaskExecutionException
- {
- Configuration configuration = archivaConfiguration.getConfiguration();
-
- File indexPath = new File( configuration.getIndexPath() );
-
- try
- {
- RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
- if ( !artifactIndex.exists() )
- {
- execute( configuration, indexPath );
- }
- }
- catch ( RepositoryIndexException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- }
-
- private void execute( Configuration configuration, File indexPath )
- throws TaskExecutionException
- {
- long time = System.currentTimeMillis();
- getLogger().info( "Starting repository indexing process" );
-
- RepositoryArtifactIndex index = indexFactory.createStandardIndex( indexPath );
-
- try
- {
- Collection keys;
- if ( index.exists() )
- {
- keys = index.getAllRecordKeys();
- }
- else
- {
- keys = Collections.EMPTY_LIST;
- }
-
- for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
- {
- RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
- if ( repositoryConfiguration.isIndexed() )
- {
- List blacklistedPatterns = new ArrayList();
- if ( repositoryConfiguration.getBlackListPatterns() != null )
- {
- blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() );
- }
- if ( configuration.getGlobalBlackListPatterns() != null )
- {
- blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() );
- }
- boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots();
-
- ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
- ReportingDatabase reporter = reportExecutor.getReportDatabase( repository, reportGroup );
-
- // keep original value in case there is another process under way
- long origStartTime = reporter.getStartTime();
- reporter.setStartTime( System.currentTimeMillis() );
-
- // Discovery process
- String layoutProperty = repositoryConfiguration.getLayout();
- ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
- AndArtifactFilter filter = new AndArtifactFilter();
- filter.add( new IndexRecordExistsArtifactFilter( keys ) );
- if ( !includeSnapshots )
- {
- filter.add( new SnapshotArtifactFilter() );
- }
-
- // Save some memory by not tracking paths we won't use
- // TODO: Plexus CDC should be able to inject this configuration
- discoverer.setTrackOmittedPaths( false );
-
- getLogger().info( "Searching repository " + repositoryConfiguration.getName() );
- List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
- if ( !artifacts.isEmpty() )
- {
- getLogger().info( "Discovered " + artifacts.size() + " unindexed artifacts" );
-
- // Work through these in batches, then flush the project cache.
- for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
- {
- int end = j + ARTIFACT_BUFFER_SIZE;
- List currentArtifacts =
- artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
- // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
- // run the reports. Done intermittently to avoid losing track of what is indexed since
- // that is what the filter is based on.
- reportExecutor.runArtifactReports( reportGroup, currentArtifacts, repository );
-
- index.indexArtifacts( currentArtifacts, recordFactory );
-
- // MRM-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
- // around that. TODO: remove when it is configurable
- flushProjectBuilderCacheHack();
- }
- }
-
- MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter );
-
- MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers
- .get( layoutProperty );
- List metadata =
- metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter );
-
- if ( !metadata.isEmpty() )
- {
- getLogger().info( "Discovered " + metadata.size() + " unprocessed metadata files" );
-
- // run the reports
- reportExecutor.runMetadataReports( reportGroup, metadata, repository );
- }
-
- reporter.setStartTime( origStartTime );
- }
- }
- }
- catch ( RepositoryIndexException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- catch ( DiscovererException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- catch ( ReportingStoreException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
-
- time = System.currentTimeMillis() - time;
- lastIndexingTime = System.currentTimeMillis();
- getLogger().info( "Finished repository indexing process in " + time + "ms" );
- }
-
- /**
- * @todo remove when no longer needed (MRM-142)
- * @plexus.requirement
- */
- private MavenProjectBuilder projectBuilder;
-
- private void flushProjectBuilderCacheHack()
- {
- try
- {
- if ( projectBuilder != null )
- {
- getLogger().info( "projectBuilder is type " + projectBuilder.getClass().getName() );
-
- java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
- f.setAccessible( true );
- Map cache = (Map) f.get( projectBuilder );
- getLogger().info( "projectBuilder.raw is type " + cache.getClass().getName() );
- cache.clear();
-
- f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
- f.setAccessible( true );
- cache = (Map) f.get( projectBuilder );
- getLogger().info( "projectBuilder.processed is type " + cache.getClass().getName() );
- cache.clear();
- }
- }
- catch ( NoSuchFieldException e )
- {
- throw new RuntimeException( e );
- }
- catch ( IllegalAccessException e )
- {
- throw new RuntimeException( e );
- }
- }
-}
--- /dev/null
+package org.apache.maven.archiva.scheduler.task;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * DataRefreshTask - task for discovering changes in the repository
+ * and updating all associated data.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DataRefreshTask
+ implements RepositoryTask
+{
+ private String jobName;
+
+ private String policy;
+
+ public String getJobName()
+ {
+ return jobName;
+ }
+
+ public String getQueuePolicy()
+ {
+ return policy;
+ }
+
+ public void setJobName( String jobName )
+ {
+ this.jobName = jobName;
+ }
+
+ public void setQueuePolicy( String policy )
+ {
+ this.policy = policy;
+ }
+
+ public long getMaxExecutionTime()
+ {
+ return 0;
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.scheduler.task;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Task for discovering changes in the repository and updating the index accordingly.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class IndexerTask
- implements RepositoryTask
-{
- private String jobName;
-
- private String policy;
-
- public long getMaxExecutionTime()
- {
- return 0;
- }
-
- public String getJobName()
- {
- return jobName;
- }
-
- public String getQueuePolicy()
- {
- return policy;
- }
-
- public void setQueuePolicy( String policy )
- {
- this.policy = policy;
- }
-
- public void setJobName( String jobName )
- {
- this.jobName = jobName;
- }
-
-
-}
<component-set>
<components>
+ <!-- TODO: Remove once CDC can handle correct cross-module descriptor creation. -->
+
+ <!-- SNIP:START -->
+
+ <component>
+ <role>org.apache.maven.archiva.common.consumers.Consumer</role>
+ <role-hint>index-artifact</role-hint>
+ <implementation>org.apache.maven.archiva.consumers.IndexArtifactConsumer</implementation>
+ <instantiation-strategy>per-lookup</instantiation-strategy>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ <field-name>artifactFactory</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.apache.maven.archiva.common.consumers.Consumer</role>
+ <role-hint>artifact-health</role-hint>
+ <implementation>org.apache.maven.archiva.consumers.ArtifactHealthConsumer</implementation>
+ <instantiation-strategy>per-lookup</instantiation-strategy>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ <field-name>artifactFactory</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.apache.maven.archiva.common.consumers.Consumer</role>
+ <role-hint>metadata-health</role-hint>
+ <implementation>org.apache.maven.archiva.consumers.RepositoryMetadataHealthConsumer</implementation>
+ <instantiation-strategy>per-lookup</instantiation-strategy>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ <field-name>artifactFactory</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <!-- SNIP:END -->
+
+ <component>
+ <role>org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers</role>
+ <implementation>org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers</implementation>
+ <description>Mutable list of consumers for the Data Refresh.</description>
+ <configuration>
+ <consumer-names>
+ <consumer-name>index-artifact</consumer-name>
+ <consumer-name>artifact-health</consumer-name>
+ <consumer-name>metadata-health</consumer-name>
+ </consumer-names>
+ </configuration>
+ </component>
+
<component>
<role>org.codehaus.plexus.taskqueue.TaskQueue</role>
- <role-hint>indexer</role-hint>
+ <role-hint>data-refresh</role-hint>
<implementation>org.codehaus.plexus.taskqueue.DefaultTaskQueue</implementation>
<lifecycle-handler>plexus-configurable</lifecycle-handler>
<configuration>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
- <role-hint>indexer</role-hint>
+ <role-hint>data-refresh</role-hint>
<implementation>org.codehaus.plexus.taskqueue.execution.ThreadedTaskQueueExecutor</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
- <role-hint>indexer</role-hint>
+ <role-hint>data-refresh</role-hint>
</requirement>
<requirement>
<role>org.codehaus.plexus.taskqueue.TaskQueue</role>
- <role-hint>indexer</role-hint>
+ <role-hint>data-refresh</role-hint>
</requirement>
</requirements>
<configuration>
- <name>indexer</name>
+ <name>data-refresh</name>
</configuration>
</component>
--- /dev/null
+package org.apache.maven.archiva;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - added to allow IDE users to pull all tests into their tool.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+ public static Test suite()
+ {
+ TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva" );
+ //$JUnit-BEGIN$
+ suite.addTest( org.apache.maven.archiva.repositories.AllTests.suite() );
+ suite.addTest( org.apache.maven.archiva.scheduler.executors.AllTests.suite() );
+ //$JUnit-END$
+ return suite;
+ }
+
+}
+++ /dev/null
-package org.apache.maven.archiva;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.conversion.LegacyRepositoryConverter;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-
-/**
- * @author Jason van Zyl
- */
-public class LegacyRepositoryConverterTest
- extends PlexusTestCase
-{
- public void testLegacyRepositoryConversion()
- throws Exception
- {
- File legacyRepositoryDirectory = getTestFile( "src/test/maven-1.x-repository" );
-
- File repositoryDirectory = getTestFile( "target/maven-2.x-repository" );
-
- LegacyRepositoryConverter rm = (LegacyRepositoryConverter) lookup( LegacyRepositoryConverter.ROLE );
-
- rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, null, true );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.repositories;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+ public static Test suite()
+ {
+ TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.repositories" );
+ //$JUnit-BEGIN$
+ suite.addTestSuite( DefaultActiveManagedRepositoriesTest.class );
+ //$JUnit-END$
+ return suite;
+ }
+
+}
* under the License.
*/
-import org.apache.maven.archiva.artifact.ManagedArtifact;
-import org.apache.maven.archiva.artifact.ManagedEjbArtifact;
-import org.apache.maven.archiva.artifact.ManagedJavaArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedEjbArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedJavaArtifact;
import org.codehaus.plexus.PlexusTestCase;
/**
* DefaultActiveManagedRepositoriesTest
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class DefaultActiveManagedRepositoriesTest
--- /dev/null
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+ public static Test suite()
+ {
+ TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.scheduler.executors" );
+ //$JUnit-BEGIN$
+ suite.addTestSuite( DataRefreshExecutorTest.class );
+ //$JUnit-END$
+ return suite;
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.scheduler.executors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.io.FileUtils;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
+
+import java.io.File;
+
+/**
+ * DataRefreshExecutorTest
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DataRefreshExecutorTest
+ extends PlexusTestCase
+{
+ private TaskExecutor taskExecutor;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "data-refresh" );
+
+ ArchivaConfiguration archivaConfiguration =
+ (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
+ Configuration configuration = archivaConfiguration.getConfiguration();
+
+ File indexPath = new File( configuration.getIndexPath() );
+ if ( indexPath.exists() )
+ {
+ FileUtils.deleteDirectory( indexPath );
+ }
+ }
+
+ public void testExecutor()
+ throws TaskExecutionException
+ {
+ taskExecutor.executeTask( new TestDataRefreshTask() );
+ }
+
+ class TestDataRefreshTask
+ extends DataRefreshTask
+ {
+ public String getJobName()
+ {
+ return "TestDataRefresh";
+ }
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.scheduler.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.File;
-
-/**
- * IndexerTaskExecutorTest
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class IndexerTaskExecutorTest
- extends PlexusTestCase
-{
- private TaskExecutor taskExecutor;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "indexer" );
-
- ArchivaConfiguration archivaConfiguration =
- (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
- Configuration configuration = archivaConfiguration.getConfiguration();
-
- File indexPath = new File( configuration.getIndexPath() );
- if ( indexPath.exists() )
- {
- FileUtils.deleteDirectory( indexPath );
- }
- }
-
- public void testIndexer()
- throws TaskExecutionException
- {
- taskExecutor.executeTask( new TestIndexerTask() );
- }
-
- class TestIndexerTask
- extends IndexerTask
- {
- public String getJobName()
- {
- return "TestIndexer";
- }
- }
-}
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.codehaus.plexus.registry.Registry</role>
+ <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
+ <role-hint>commons-configuration</role-hint>
+ <configuration>
+ <properties>
+ <xml fileName="${basedir}/src/test/conf/archiva.xml"
+ config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
+ </properties>
+ </configuration>
+ </component>
+
+ <component>
+ <role>org.codehaus.plexus.jdo.JdoFactory</role>
+ <role-hint>archiva</role-hint>
+ <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+
+ <configuration>
+ <!-- Database Configuration -->
+ <driverName>org.hsqldb.jdbcDriver</driverName>
+ <url>jdbc:hsqldb:mem:TESTDB</url>
+ <userName>sa</userName>
+ <password></password>
+
+ <!-- JPOX and JDO configuration -->
+ <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+ <otherProperties>
+ <property>
+ <name>javax.jdo.PersistenceManagerFactoryClass</name>
+ <value>org.jpox.PersistenceManagerFactoryImpl</value>
+ </property>
+ <property>
+ <name>org.jpox.autoCreateSchema</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>org.jpox.validateTables</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateConstraints</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateColumns</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.autoStartMechanism</name>
+ <value>None</value>
+ </property>
+ <property>
+ <name>org.jpox.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.poid.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.rdbms.dateTimezone</name>
+ <value>JDK_DEFAULT_TIMEZONE</value>
+ </property>
+ </otherProperties>
+ </configuration>
+ </component>
+ </components>
+</component-set>
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<component-set>
- <components>
- <component>
- <role>org.codehaus.plexus.registry.Registry</role>
- <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
- <role-hint>commons-configuration</role-hint>
- <configuration>
- <properties>
- <xml fileName="${basedir}/src/test/conf/archiva.xml"
- config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
- </properties>
- </configuration>
- </component>
- </components>
-</component-set>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>archiva-discoverer</artifactId>
- <name>Archiva Artifact Discoverer</name>
+ <name>Archiva Discoverer</name>
<dependencies>
+ <dependency>
+ <groupId>org.apache.maven.archiva</groupId>
+ <artifactId>archiva-common</artifactId>
+ </dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-utils</artifactId>
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Base class for artifact discoverers.
- *
- * @author John Casey
- * @author Brett Porter
- */
-public abstract class AbstractArtifactDiscoverer
- extends AbstractDiscoverer
- implements ArtifactDiscoverer
-{
- /**
- * Standard patterns to exclude from discovery as they are not artifacts.
- */
- private static final String[] STANDARD_DISCOVERY_EXCLUDES = {"bin/**", "reports/**", ".index", ".reports/**",
- ".maven/**", "**/*.md5", "**/*.MD5", "**/*.sha1", "**/*.SHA1", "**/*snapshot-version", "*/website/**",
- "*/licenses/**", "*/licences/**", "**/.htaccess", "**/*.html", "**/*.asc", "**/*.txt", "**/*.xml", "**/README*",
- "**/CHANGELOG*", "**/KEYS*"};
-
- private List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns )
- {
- return scanForArtifactPaths( repositoryBase, blacklistedPatterns, null, STANDARD_DISCOVERY_EXCLUDES );
- }
-
- public List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter )
- throws DiscovererException
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
- }
-
- File repositoryBase = new File( repository.getBasedir() );
-
- List artifacts = new ArrayList();
-
- if ( repositoryBase.exists() )
- {
- List artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns );
-
- for ( Iterator i = artifactPaths.iterator(); i.hasNext(); )
- {
- String path = (String) i.next();
-
- try
- {
- Artifact artifact = buildArtifactFromPath( path, repository );
-
- if ( filter.include( artifact ) )
- {
- artifacts.add( artifact );
- }
- else
- {
- addExcludedPath( path, "Omitted by filter" );
- }
- }
- catch ( DiscovererException e )
- {
- addKickedOutPath( path, e.getMessage() );
- }
- }
- }
- return artifacts;
- }
-
- /**
- * Returns an artifact object that is represented by the specified path in a repository
- *
- * @param path The path that is pointing to an artifact
- * @param repository The repository of the artifact
- * @return Artifact
- * @throws DiscovererException when the specified path does correspond to an artifact
- */
- public Artifact buildArtifactFromPath( String path, ArtifactRepository repository )
- throws DiscovererException
- {
- Artifact artifact = buildArtifact( path );
-
- if ( artifact != null )
- {
- artifact.setRepository( repository );
- artifact.setFile( new File( repository.getBasedir(), path ) );
- }
-
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.DirectoryScanner;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Base class for the artifact and metadata discoverers.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractDiscoverer
- extends AbstractLogEnabled
- implements Discoverer
-{
- private List kickedOutPaths = new ArrayList();
-
- /**
- * @plexus.requirement
- */
- protected ArtifactFactory artifactFactory;
-
- private static final String[] EMPTY_STRING_ARRAY = new String[0];
-
- private List excludedPaths = new ArrayList();
-
- /**
- * @plexus.configuration default-value="true"
- */
- private boolean trackOmittedPaths;
-
- /**
- * Add a path to the list of files that were kicked out due to being invalid.
- *
- * @param path the path to add
- * @param reason the reason why the path is being kicked out
- */
- protected void addKickedOutPath( String path, String reason )
- {
- if ( trackOmittedPaths )
- {
- kickedOutPaths.add( new DiscovererPath( path, reason ) );
- }
- }
-
- /**
- * Add a path to the list of files that were excluded.
- *
- * @param path the path to add
- * @param reason the reason why the path is excluded
- */
- protected void addExcludedPath( String path, String reason )
- {
- excludedPaths.add( new DiscovererPath( path, reason ) );
- }
-
- /**
- * Returns an iterator for the list if DiscovererPaths that were found to not represent a searched object
- *
- * @return Iterator for the DiscovererPath List
- */
- public Iterator getKickedOutPathsIterator()
- {
- assert trackOmittedPaths;
- return kickedOutPaths.iterator();
- }
-
- protected List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns, String[] includes,
- String[] excludes )
- {
- List allExcludes = new ArrayList();
- allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
- if ( excludes != null )
- {
- allExcludes.addAll( Arrays.asList( excludes ) );
- }
- if ( blacklistedPatterns != null )
- {
- allExcludes.addAll( blacklistedPatterns );
- }
-
- DirectoryScanner scanner = new DirectoryScanner();
-
- scanner.setBasedir( repositoryBase );
-
- if ( includes != null )
- {
- scanner.setIncludes( includes );
- }
- scanner.setExcludes( (String[]) allExcludes.toArray( EMPTY_STRING_ARRAY ) );
-
- // TODO: Correct for extremely large repositories (artifact counts over 200,000 entries)
- scanner.scan();
-
- if ( trackOmittedPaths )
- {
- for ( Iterator files = Arrays.asList( scanner.getExcludedFiles() ).iterator(); files.hasNext(); )
- {
- String path = files.next().toString();
-
- excludedPaths.add( new DiscovererPath( path, "Artifact was in the specified list of exclusions" ) );
- }
- }
-
- // TODO: this could be a part of the scanner
- List includedPaths = new ArrayList();
- for ( Iterator files = Arrays.asList( scanner.getIncludedFiles() ).iterator(); files.hasNext(); )
- {
- String path = files.next().toString();
-
- includedPaths.add( path );
- }
-
- return includedPaths;
- }
-
- /**
- * Returns an iterator for the list if DiscovererPaths that were not processed because they are explicitly excluded
- *
- * @return Iterator for the DiscovererPath List
- */
- public Iterator getExcludedPathsIterator()
- {
- assert trackOmittedPaths;
- return excludedPaths.iterator();
- }
-
- public void setTrackOmittedPaths( boolean trackOmittedPaths )
- {
- this.trackOmittedPaths = trackOmittedPaths;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.List;
-
-/**
- * Interface for implementation that can discover artifacts within a repository.
- *
- * @author John Casey
- * @author Brett Porter
- * @todo do we want blacklisted patterns in another form? Part of the object construction?
- * @todo should includeSnapshots be configuration on the component? If not, should the methods be changed to include alternates for both possibilities (discoverReleaseArtifacts, discoverReleaseAndSnapshotArtifacts)?
- * @todo instead of a returned list, should a listener be passed in?
- */
-public interface ArtifactDiscoverer
- extends Discoverer
-{
- String ROLE = ArtifactDiscoverer.class.getName();
-
- /**
- * Discover artifacts in the repository. Only artifacts added since the last attempt at discovery will be found.
- * This process guarantees never to miss an artifact, however it is possible that an artifact will be received twice
- * consecutively even if unchanged, so any users of this list must handle such a situation gracefully.
- *
- * @param repository the location of the repository
- * @param blacklistedPatterns pattern that lists any files to prevent from being included when scanning
- * @param filter filter for artifacts to include in the discovered list
- * @return the list of artifacts discovered
- * @throws DiscovererException if there was an unrecoverable problem discovering artifacts or recording progress
- */
- List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter )
- throws DiscovererException;
-
- /**
- * Build an artifact from a path in the repository
- *
- * @param path the path
- * @return the artifact
- * @throws DiscovererException if the file is not a valid artifact
- * @todo this should be in maven-artifact
- */
- Artifact buildArtifact( String path )
- throws DiscovererException;
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.artifact.Artifact;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the new repository layout (Maven 2.0+).
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="default"
- */
-public class DefaultArtifactDiscoverer
- extends AbstractArtifactDiscoverer
-{
- /**
- * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
- */
- public Artifact buildArtifact( String path )
- throws DiscovererException
- {
- List pathParts = new ArrayList();
- StringTokenizer st = new StringTokenizer( path, "/\\" );
- while ( st.hasMoreTokens() )
- {
- pathParts.add( st.nextToken() );
- }
-
- Collections.reverse( pathParts );
-
- Artifact artifact;
- if ( pathParts.size() >= 4 )
- {
- // maven 2.x path
-
- // the actual artifact filename.
- String filename = (String) pathParts.remove( 0 );
-
- // the next one is the version.
- String version = (String) pathParts.remove( 0 );
-
- // the next one is the artifactId.
- String artifactId = (String) pathParts.remove( 0 );
-
- // the remaining are the groupId.
- Collections.reverse( pathParts );
- String groupId = StringUtils.join( pathParts.iterator(), "." );
-
- String remainingFilename = filename;
- if ( remainingFilename.startsWith( artifactId + "-" ) )
- {
- remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
-
- String classifier = null;
-
- // TODO: use artifact handler, share with legacy discoverer
- String type;
- if ( remainingFilename.endsWith( ".tar.gz" ) )
- {
- type = "distribution-tgz";
- remainingFilename =
- remainingFilename.substring( 0, remainingFilename.length() - ".tar.gz".length() );
- }
- else if ( remainingFilename.endsWith( ".zip" ) )
- {
- type = "distribution-zip";
- remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
- }
- else if ( remainingFilename.endsWith( "-test-sources.jar" ) )
- {
- type = "java-source";
- classifier = "test-sources";
- remainingFilename =
- remainingFilename.substring( 0, remainingFilename.length() - "-test-sources.jar".length() );
- }
- else if ( remainingFilename.endsWith( "-sources.jar" ) )
- {
- type = "java-source";
- classifier = "sources";
- remainingFilename =
- remainingFilename.substring( 0, remainingFilename.length() - "-sources.jar".length() );
- }
- else
- {
- int index = remainingFilename.lastIndexOf( "." );
- if ( index >= 0 )
- {
- type = remainingFilename.substring( index + 1 );
- remainingFilename = remainingFilename.substring( 0, index );
- }
- else
- {
- throw new DiscovererException( "Path filename does not have an extension" );
- }
- }
-
- Artifact result;
- if ( classifier == null )
- {
- result =
- artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
- }
- else
- {
- result =
- artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
- }
-
- if ( result.isSnapshot() )
- {
- // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
- int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
- if ( classifierIndex >= 0 )
- {
- classifier = remainingFilename.substring( classifierIndex + 1 );
- remainingFilename = remainingFilename.substring( 0, classifierIndex );
- result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
- type, classifier );
- }
- else
- {
- result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
- Artifact.SCOPE_RUNTIME, type );
- }
-
- // poor encapsulation requires we do this to populate base version
- if ( !result.isSnapshot() )
- {
- throw new DiscovererException( "Failed to create a snapshot artifact: " + result );
- }
- else if ( !result.getBaseVersion().equals( version ) )
- {
- throw new DiscovererException(
- "Built snapshot artifact base version does not match path version: " + result +
- "; should have been version: " + version );
- }
- else
- {
- artifact = result;
- }
- }
- else if ( !remainingFilename.startsWith( version ) )
- {
- throw new DiscovererException( "Built artifact version does not match path version" );
- }
- else if ( !remainingFilename.equals( version ) )
- {
- if ( remainingFilename.charAt( version.length() ) == '-' )
- {
- classifier = remainingFilename.substring( version.length() + 1 );
- artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
- classifier );
- }
- else
- {
- throw new DiscovererException( "Path version does not corresspond to an artifact version" );
- }
- }
- else
- {
- artifact = result;
- }
- }
- else
- {
- throw new DiscovererException( "Path filename does not correspond to an artifact" );
- }
- }
- else
- {
- throw new DiscovererException( "Path is too short to build an artifact from" );
- }
-
- return artifact;
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.common.consumers.Consumer;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.DirectoryWalker;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Discoverer Implementation.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @plexus.component role="org.apache.maven.archiva.discoverer.Discoverer"
+ */
+public class DefaultDiscoverer
+ extends AbstractLogEnabled
+ implements Discoverer
+{
+ /**
+ * Standard patterns to exclude from discovery as they are usually noise.
+ */
+ private static final String[] STANDARD_DISCOVERY_EXCLUDES = {
+ "bin/**",
+ "reports/**",
+ ".index",
+ ".reports/**",
+ ".maven/**",
+ "**/*snapshot-version",
+ "*/website/**",
+ "*/licences/**",
+ "**/.htaccess",
+ "**/*.html",
+ "**/*.txt",
+ "**/README*",
+ "**/CHANGELOG*",
+ "**/KEYS*" };
+
+ public DefaultDiscoverer()
+ {
+ }
+
+ public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots )
+ throws DiscovererException
+ {
+ return walkRepository( repository, consumers, includeSnapshots, 0, null, null );
+ }
+
+ public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers,
+ boolean includeSnapshots, long onlyModifiedAfterTimestamp,
+ List extraFileExclusions, List extraFileInclusions )
+ throws DiscovererException
+ {
+ // Sanity Check
+
+ if ( repository == null )
+ {
+ throw new IllegalArgumentException( "Unable to operate on a null repository." );
+ }
+
+ if ( !"file".equals( repository.getProtocol() ) )
+ {
+ throw new UnsupportedOperationException( "Only filesystem repositories are supported." );
+ }
+
+ File repositoryBase = new File( repository.getBasedir() );
+
+ if ( !repositoryBase.exists() )
+ {
+ throw new UnsupportedOperationException( "Unable to scan a repository, directory "
+ + repositoryBase.getAbsolutePath() + " does not exist." );
+ }
+
+ if ( !repositoryBase.isDirectory() )
+ {
+ throw new UnsupportedOperationException( "Unable to scan a repository, path "
+ + repositoryBase.getAbsolutePath() + " is not a directory." );
+ }
+
+ // Setup Includes / Excludes.
+
+ List allExcludes = new ArrayList();
+ List allIncludes = new ArrayList();
+
+ // Exclude all of the SCM patterns.
+ allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
+
+ // Exclude all of the archiva noise patterns.
+ allExcludes.addAll( Arrays.asList( STANDARD_DISCOVERY_EXCLUDES ) );
+
+ if ( !includeSnapshots )
+ {
+ allExcludes.add( "**/*-SNAPSHOT*" );
+ }
+
+ if ( extraFileExclusions != null )
+ {
+ allExcludes.addAll( extraFileExclusions );
+ }
+
+ Iterator it = consumers.iterator();
+ while ( it.hasNext() )
+ {
+ Consumer consumer = (Consumer) it.next();
+
+ /* NOTE: Do not insert the consumer exclusion patterns here.
+ * Exclusion patterns are handled by RepositoryScanner.wantsFile(Consumer, String)
+ *
+ * addUniqueElements( consumer.getExcludePatterns(), allExcludes );
+ */
+ addUniqueElements( consumer.getIncludePatterns(), allIncludes );
+ }
+
+ if ( extraFileInclusions != null )
+ {
+ allIncludes.addAll( extraFileInclusions );
+ }
+
+ // Setup Directory Walker
+
+ DirectoryWalker dirWalker = new DirectoryWalker();
+
+ dirWalker.setBaseDir( repositoryBase );
+
+ dirWalker.setIncludes( allIncludes );
+ dirWalker.setExcludes( allExcludes );
+
+ // Setup the Scan Instance
+ RepositoryScanner repoScanner = new RepositoryScanner( repository, consumers );
+ repoScanner.setOnlyModifiedAfterTimestamp( onlyModifiedAfterTimestamp );
+
+ repoScanner.setLogger( getLogger() );
+ dirWalker.addDirectoryWalkListener( repoScanner );
+
+ // Execute scan.
+ dirWalker.scan();
+
+ return repoScanner.getStatistics();
+ }
+
+ private void addUniqueElements( List fromList, List toList )
+ {
+ Iterator itFrom = fromList.iterator();
+ while ( itFrom.hasNext() )
+ {
+ Object o = itFrom.next();
+ if ( !toList.contains( o ) )
+ {
+ toList.add( o );
+ }
+ }
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * This class gets all the paths that contain the metadata files.
- *
- * @plexus.component role="org.apache.maven.archiva.discoverer.MetadataDiscoverer" role-hint="default"
- */
-public class DefaultMetadataDiscoverer
- extends AbstractDiscoverer
- implements MetadataDiscoverer
-{
- /**
- * Standard patterns to include in discovery of metadata files.
- *
- * @todo Note that only the remote format is supported at this time: you cannot search local repository metadata due
- * to the way it is later loaded in the searchers. Review code using pathOfRemoteMetadata. IS there any value in
- * searching the local metadata in the first place though?
- */
- private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"};
-
- public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter filter )
- throws DiscovererException
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
- }
-
- List metadataFiles = new ArrayList();
-
- File repositoryBase = new File( repository.getBasedir() );
- if ( repositoryBase.exists() )
- {
- List metadataPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns,
- STANDARD_DISCOVERY_INCLUDES, null );
-
- for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
- {
- String metadataPath = (String) i.next();
- try
- {
- RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), metadataPath );
- File f = new File( repository.getBasedir(), metadataPath );
- if ( filter.include( metadata, f.lastModified() ) )
- {
- metadataFiles.add( metadata );
- }
- else
- {
- addExcludedPath( metadataPath, "Metadata excluded by filter" );
- }
- }
- catch ( DiscovererException e )
- {
- addKickedOutPath( metadataPath, e.getMessage() );
- }
- }
- }
- return metadataFiles;
- }
-
- public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
- throws DiscovererException
- {
- return discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
- }
-
- private RepositoryMetadata buildMetadata( String repo, String metadataPath )
- throws DiscovererException
- {
- Metadata m;
- File f = new File( repo, metadataPath );
- try
- {
- Reader reader = new FileReader( f );
- MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
-
- m = metadataReader.read( reader );
- }
- catch ( XmlPullParserException e )
- {
- throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e );
- }
- catch ( IOException e )
- {
- throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e );
- }
-
- RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
-
- if ( repositoryMetadata == null )
- {
- throw new DiscovererException( "Unable to build a repository metadata from path" );
- }
-
- return repositoryMetadata;
- }
-
- /**
- * Builds a RepositoryMetadata object from a Metadata object and its path.
- *
- * @param m Metadata
- * @param metadataPath path
- * @return RepositoryMetadata if the parameters represent one; null if not
- * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc)
- */
- private RepositoryMetadata buildMetadata( Metadata m, String metadataPath )
- {
- String metaGroupId = m.getGroupId();
- String metaArtifactId = m.getArtifactId();
- String metaVersion = m.getVersion();
-
- // check if the groupId, artifactId and version is in the
- // metadataPath
- // parse the path, in reverse order
- List pathParts = new ArrayList();
- StringTokenizer st = new StringTokenizer( metadataPath, "/\\" );
- while ( st.hasMoreTokens() )
- {
- pathParts.add( st.nextToken() );
- }
-
- Collections.reverse( pathParts );
- // remove the metadata file
- pathParts.remove( 0 );
- Iterator it = pathParts.iterator();
- String tmpDir = (String) it.next();
-
- Artifact artifact = null;
- if ( StringUtils.isNotEmpty( metaVersion ) )
- {
- artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
- }
-
- // snapshotMetadata
- RepositoryMetadata metadata = null;
- if ( tmpDir != null && tmpDir.equals( metaVersion ) )
- {
- if ( artifact != null )
- {
- metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- }
- }
- else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
- {
- // artifactMetadata
- if ( artifact != null )
- {
- metadata = new ArtifactRepositoryMetadata( artifact );
- }
- else
- {
- artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
- metadata = new ArtifactRepositoryMetadata( artifact );
- }
- }
- else
- {
- String groupDir = "";
- int ctr = 0;
- for ( it = pathParts.iterator(); it.hasNext(); )
- {
- String path = (String) it.next();
- if ( ctr == 0 )
- {
- groupDir = path;
- }
- else
- {
- groupDir = path + "." + groupDir;
- }
- ctr++;
- }
-
- // groupMetadata
- if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
- {
- metadata = new GroupRepositoryMetadata( metaGroupId );
- }
- }
-
- return metadata;
- }
-}
* under the License.
*/
-import java.util.Iterator;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.File;
+import java.util.List;
/**
- * @author Edwin Punzalan
+ * Discoverer - generic discoverer of content in an ArtifactRepository.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
*/
public interface Discoverer
{
+ public static final String ROLE = Discoverer.class.getName();
+
/**
- * Get the list of paths kicked out during the discovery process.
- *
- * @return the paths as Strings.
+ * Walk the repository, and report to the consumers the files found.
+ *
+ * Report changes to the appropriate Consumer.
+ *
+ * This is just a convenience method to {@link #walkRepository(ArtifactRepository, List, boolean, long, List, List)}
+ * equivalent to calling <code>walkRepository( repository, consumers, includeSnapshots, 0, null, null );</code>
+ *
+ * @param repository the repository to change.
+ * @param consumers use the provided list of consumers.
+ * @param includeSnapshots true to include snapshots in the walking of this repository.
+ * @return the statistics for this scan.
+ * @throws DiscovererException if there was a fundamental problem with getting the discoverer started.
*/
- Iterator getKickedOutPathsIterator();
+ public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots )
+ throws DiscovererException;
/**
- * Get the list of paths excluded during the discovery process.
- *
- * @return the paths as Strings.
+ * Walk the repository, and report to the consumers the files found.
+ *
+ * Report changes to the appropriate Consumer.
+ *
+ * @param repository the repository to change.
+ * @param consumers use the provided list of consumers.
+ * @param includeSnapshots true to include snapshots in the scanning of this repository.
+     * @param onlyModifiedAfterTimestamp Only report to the consumers, files that have a {@link File#lastModified()}
+ * after the provided timestamp.
+ * @param extraFileExclusions an optional list of file exclusions on the walk.
+ * @param extraFileInclusions an optional list of file inclusions on the walk.
+ * @return the statistics for this scan.
+ * @throws DiscovererException if there was a fundamental problem with getting the discoverer started.
*/
- Iterator getExcludedPathsIterator();
-
- void setTrackOmittedPaths( boolean trackOmittedPaths );
+ public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers,
+ boolean includeSnapshots, long onlyModifiedAfterTimestamp,
+ List extraFileExclusions, List extraFileInclusions )
+ throws DiscovererException;
}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class DiscovererPath
-{
- /**
- * The path discovered.
- */
- private final String path;
-
- /**
- * A comment about why the path is being processed.
- */
- private final String comment;
-
- public DiscovererPath( String path, String comment )
- {
- this.path = path;
- this.comment = comment;
- }
-
- public String getPath()
- {
- return path;
- }
-
- public String getComment()
- {
- return comment;
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.math.NumberUtils;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.util.IOUtil;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Properties;
+
+/**
+ * DiscovererStatistics
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DiscovererStatistics
+{
+ private static final String PROP_FILES_CONSUMED = "scan.consumed.files";
+
+ private static final String PROP_FILES_INCLUDED = "scan.included.files";
+
+ private static final String PROP_FILES_SKIPPED = "scan.skipped.files";
+
+ private static final String PROP_TIMESTAMP_STARTED = "scan.started.timestamp";
+
+ private static final String PROP_TIMESTAMP_FINISHED = "scan.finished.timestamp";
+
+ protected long timestampStarted = 0;
+
+ protected long timestampFinished = 0;
+
+ protected long filesIncluded = 0;
+
+ protected long filesConsumed = 0;
+
+ protected long filesSkipped = 0;
+
+ private ArtifactRepository repository;
+
+ public DiscovererStatistics( ArtifactRepository repository )
+ {
+ this.repository = repository;
+ }
+
+ public void load( String filename )
+ throws IOException
+ {
+ File repositoryBase = new File( this.repository.getBasedir() );
+
+ File scanProperties = new File( repositoryBase, filename );
+ FileInputStream fis = null;
+ try
+ {
+ Properties props = new Properties();
+ fis = new FileInputStream( scanProperties );
+ props.load( fis );
+
+ timestampFinished = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_FINISHED ), 0 );
+ timestampStarted = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_STARTED ), 0 );
+ filesIncluded = NumberUtils.toLong( props.getProperty( PROP_FILES_INCLUDED ), 0 );
+ filesConsumed = NumberUtils.toLong( props.getProperty( PROP_FILES_CONSUMED ), 0 );
+ filesSkipped = NumberUtils.toLong( props.getProperty( PROP_FILES_SKIPPED ), 0 );
+ }
+ catch ( IOException e )
+ {
+ reset();
+ throw e;
+ }
+ finally
+ {
+ IOUtil.close( fis );
+ }
+ }
+
+ public void save( String filename )
+ throws IOException
+ {
+ Properties props = new Properties();
+ props.setProperty( PROP_TIMESTAMP_FINISHED, String.valueOf( timestampFinished ) );
+ props.setProperty( PROP_TIMESTAMP_STARTED, String.valueOf( timestampStarted ) );
+ props.setProperty( PROP_FILES_INCLUDED, String.valueOf( filesIncluded ) );
+ props.setProperty( PROP_FILES_CONSUMED, String.valueOf( filesConsumed ) );
+ props.setProperty( PROP_FILES_SKIPPED, String.valueOf( filesSkipped ) );
+
+ File repositoryBase = new File( this.repository.getBasedir() );
+ File statsFile = new File( repositoryBase, filename );
+
+ FileOutputStream fos = null;
+ try
+ {
+ fos = new FileOutputStream( statsFile );
+ props.store( fos, "Last Scan Information, managed by Archiva. DO NOT EDIT" );
+ fos.flush();
+ }
+ finally
+ {
+ IOUtil.close( fos );
+ }
+ }
+
+ public void reset()
+ {
+ timestampStarted = 0;
+ timestampFinished = 0;
+ filesIncluded = 0;
+ filesConsumed = 0;
+ filesSkipped = 0;
+ }
+
+ public long getElapsedMilliseconds()
+ {
+ return timestampFinished - timestampStarted;
+ }
+
+ public long getFilesConsumed()
+ {
+ return filesConsumed;
+ }
+
+ public long getFilesIncluded()
+ {
+ return filesIncluded;
+ }
+
+ public ArtifactRepository getRepository()
+ {
+ return repository;
+ }
+
+ public long getTimestampFinished()
+ {
+ return timestampFinished;
+ }
+
+ public long getTimestampStarted()
+ {
+ return timestampStarted;
+ }
+
+ public long getFilesSkipped()
+ {
+ return filesSkipped;
+ }
+
+ public void setTimestampFinished( long timestampFinished )
+ {
+ this.timestampFinished = timestampFinished;
+ }
+
+ public void setTimestampStarted( long timestampStarted )
+ {
+ this.timestampStarted = timestampStarted;
+ }
+
+ public void dump( Logger logger )
+ {
+ logger.info( "----------------------------------------------------" );
+ logger.info( "Scan of Repository: " + repository.getId() );
+ logger.info( " Started : " + toHumanTimestamp( this.getTimestampStarted() ) );
+ logger.info( " Finished: " + toHumanTimestamp( this.getTimestampFinished() ) );
+        // TODO: pretty print elapsed time.
+ logger.info( " Duration: " + this.getElapsedMilliseconds() + "ms" );
+ logger.info( " Files : " + this.getFilesIncluded() );
+ logger.info( " Consumed: " + this.getFilesConsumed() );
+ logger.info( " Skipped : " + this.getFilesSkipped() );
+ }
+
+ private String toHumanTimestamp( long timestamp )
+ {
+ SimpleDateFormat dateFormat = new SimpleDateFormat();
+ return dateFormat.format( new Date( timestamp ) );
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the legacy repository layout (Maven 1.x).
- * Method used to build an artifact object using a relative path from a repository base directory. An artifactId
- * having the words "DEV", "PRE", "RC", "ALPHA", "BETA", "DEBUG", "UNOFFICIAL", "CURRENT", "LATEST", "FCS",
- * "RELEASE", "NIGHTLY", "SNAPSHOT" and "TEST" (not case-sensitive) will most likely make this method fail as
- * they are reserved for version usage.
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="legacy"
- */
-public class LegacyArtifactDiscoverer
- extends AbstractArtifactDiscoverer
-{
- /**
- * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String)
- */
- public Artifact buildArtifact( String path )
- throws DiscovererException
- {
- StringTokenizer tokens = new StringTokenizer( path, "/\\" );
-
- Artifact result;
-
- int numberOfTokens = tokens.countTokens();
-
- if ( numberOfTokens == 3 )
- {
- String groupId = tokens.nextToken();
-
- String type = tokens.nextToken();
-
- if ( type.endsWith( "s" ) )
- {
- type = type.substring( 0, type.length() - 1 );
-
- // contains artifactId, version, classifier, and extension.
- String avceGlob = tokens.nextToken();
-
- //noinspection CollectionDeclaredAsConcreteClass
- LinkedList avceTokenList = new LinkedList();
-
- StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
- while ( avceTokenizer.hasMoreTokens() )
- {
- avceTokenList.addLast( avceTokenizer.nextToken() );
- }
-
- String lastAvceToken = (String) avceTokenList.removeLast();
-
- // TODO: share with other discoverer, use artifact handlers instead
- if ( lastAvceToken.endsWith( ".tar.gz" ) )
- {
- type = "distribution-tgz";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else if ( lastAvceToken.endsWith( "sources.jar" ) )
- {
- type = "java-source";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else if ( lastAvceToken.endsWith( "javadoc.jar" ) )
- {
- type = "javadoc.jar";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else if ( lastAvceToken.endsWith( ".zip" ) )
- {
- type = "distribution-zip";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else
- {
- int extPos = lastAvceToken.lastIndexOf( '.' );
-
- if ( extPos > 0 )
- {
- String ext = lastAvceToken.substring( extPos + 1 );
- if ( type.equals( ext ) || "plugin".equals( type ) )
- {
- lastAvceToken = lastAvceToken.substring( 0, extPos );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else
- {
- throw new DiscovererException( "Path type does not match the extension" );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename does not have an extension" );
- }
- }
-
- // let's discover the version, and whatever's leftover will be either
- // a classifier, or part of the artifactId, depending on position.
- // Since version is at the end, we have to move in from the back.
- Collections.reverse( avceTokenList );
-
- // TODO: this is obscene - surely a better way?
- String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" +
- "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" +
- "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" +
- "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" +
- "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" +
- "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" +
- "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
-
- StringBuffer classifierBuffer = new StringBuffer();
- StringBuffer versionBuffer = new StringBuffer();
-
- boolean firstVersionTokenEncountered = false;
- boolean firstToken = true;
-
- int tokensIterated = 0;
- for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
- {
- String token = (String) it.next();
-
- boolean tokenIsVersionPart = token.matches( validVersionParts );
-
- StringBuffer bufferToUpdate;
-
- // NOTE: logic in code is reversed, since we're peeling off the back
- // Any token after the last versionPart will be in the classifier.
- // Any token UP TO first non-versionPart is part of the version.
- if ( !tokenIsVersionPart )
- {
- if ( firstVersionTokenEncountered )
- {
- //noinspection BreakStatement
- break;
- }
- else
- {
- bufferToUpdate = classifierBuffer;
- }
- }
- else
- {
- firstVersionTokenEncountered = true;
-
- bufferToUpdate = versionBuffer;
- }
-
- if ( firstToken )
- {
- firstToken = false;
- }
- else
- {
- bufferToUpdate.insert( 0, '-' );
- }
-
- bufferToUpdate.insert( 0, token );
-
- tokensIterated++;
- }
-
- // Now, restore the proper ordering so we can build the artifactId.
- Collections.reverse( avceTokenList );
-
- // if we didn't find a version, then punt. Use the last token
- // as the version, and set the classifier empty.
- if ( versionBuffer.length() < 1 )
- {
- if ( avceTokenList.size() > 1 )
- {
- int lastIdx = avceTokenList.size() - 1;
-
- versionBuffer.append( avceTokenList.get( lastIdx ) );
- avceTokenList.remove( lastIdx );
- }
-
- classifierBuffer.setLength( 0 );
- }
- else
- {
- // if everything is kosher, then pop off all the classifier and
- // version tokens, leaving the naked artifact id in the list.
- avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
- }
-
- StringBuffer artifactIdBuffer = new StringBuffer();
-
- firstToken = true;
- for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
- {
- String token = (String) it.next();
-
- if ( firstToken )
- {
- firstToken = false;
- }
- else
- {
- artifactIdBuffer.append( '-' );
- }
-
- artifactIdBuffer.append( token );
- }
-
- String artifactId = artifactIdBuffer.toString();
-
- if ( artifactId.length() > 0 )
- {
- int lastVersionCharIdx = versionBuffer.length() - 1;
- if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
- {
- versionBuffer.setLength( lastVersionCharIdx );
- }
-
- String version = versionBuffer.toString();
-
- if ( version.length() > 0 )
- {
- if ( classifierBuffer.length() > 0 )
- {
- result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
- classifierBuffer.toString() );
- }
- else
- {
- result = artifactFactory.createArtifact( groupId, artifactId, version,
- Artifact.SCOPE_RUNTIME, type );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename version is empty" );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename artifactId is empty" );
- }
- }
- else
- {
- throw new DiscovererException( "Path artifact type does not corresspond to an artifact type" );
- }
- }
- else
- {
- throw new DiscovererException( "Path does not match a legacy repository path for an artifact" );
- }
-
- return result;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-import java.util.List;
-
-/**
- * Interface for discovering metadata files.
- */
-public interface MetadataDiscoverer
- extends Discoverer
-{
- String ROLE = MetadataDiscoverer.class.getName();
-
- /**
- * Search for metadata files in the repository.
- *
- * @param repository The repository.
- * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
- * @param metadataFilter filter to use on the discovered metadata before returning
- * @return the list of artifacts found
- * @throws DiscovererException if there is a problem during the discovery process
- */
- List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter metadataFilter )
- throws DiscovererException;
-
- /**
- * Search for metadata files in the repository.
- *
- * @param repository The repository.
- * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
- * @return the list of artifacts found
- * @throws DiscovererException if there is a problem during the discovery process
- */
- List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns )
- throws DiscovererException;
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.SystemUtils;
+import org.apache.maven.archiva.common.consumers.Consumer;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.util.DirectoryWalkListener;
+import org.codehaus.plexus.util.SelectorUtils;
+
+import java.io.File;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * RepositoryScanner - this is an instance of a scan against a repository.
+ *
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class RepositoryScanner
+ implements DirectoryWalkListener
+{
+ public static final String ROLE = RepositoryScanner.class.getName();
+
+ private List consumers;
+
+ private ArtifactRepository repository;
+
+ private Logger logger;
+
+ private boolean isCaseSensitive = true;
+
+ private DiscovererStatistics stats;
+
+ private long onlyModifiedAfterTimestamp = 0;
+
+ public RepositoryScanner( ArtifactRepository repository, List consumerList )
+ {
+ this.repository = repository;
+ this.consumers = consumerList;
+ stats = new DiscovererStatistics( repository );
+
+ Iterator it = this.consumers.iterator();
+ while ( it.hasNext() )
+ {
+ Consumer consumer = (Consumer) it.next();
+
+ if ( !consumer.init( this.repository ) )
+ {
+ throw new IllegalStateException( "Consumer [" + consumer.getName()
+ + "] is reporting that it is incompatible with the [" + repository.getId() + "] repository." );
+ }
+ }
+
+ if ( SystemUtils.IS_OS_WINDOWS )
+ {
+ isCaseSensitive = false;
+ }
+ }
+
+ public DiscovererStatistics getStatistics()
+ {
+ return stats;
+ }
+
+ public void directoryWalkStarting( File basedir )
+ {
+ getLogger().info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getBasedir() );
+ stats.reset();
+ stats.timestampStarted = System.currentTimeMillis();
+ }
+
+ public void directoryWalkStep( int percentage, File file )
+ {
+ getLogger().debug( "Walk Step: " + percentage + ", " + file );
+
+ // Timestamp finished points to the last successful scan, not this current one.
+ if ( file.lastModified() < onlyModifiedAfterTimestamp )
+ {
+            // Skip file as no change has occurred.
+ getLogger().debug( "Skipping, No Change: " + file.getAbsolutePath() );
+ stats.filesSkipped++;
+ return;
+ }
+
+ synchronized ( consumers )
+ {
+ stats.filesIncluded++;
+
+ BaseFile basefile = new BaseFile( repository.getBasedir(), file );
+
+ Iterator itConsumers = this.consumers.iterator();
+ while ( itConsumers.hasNext() )
+ {
+ Consumer consumer = (Consumer) itConsumers.next();
+
+ if ( wantsFile( consumer, basefile.getRelativePath() ) )
+ {
+ try
+ {
+ getLogger().debug( "Sending to consumer: " + consumer.getName() );
+ stats.filesConsumed++;
+ consumer.processFile( basefile );
+ }
+ catch ( Exception e )
+ {
+ /* Intentionally Catch all exceptions.
+ * So that the discoverer processing can continue.
+ */
+ getLogger().error(
+ "Consumer [" + consumer.getName() + "] had an error when processing file ["
+ + basefile.getAbsolutePath() + "]: " + e.getMessage(), e );
+ }
+ }
+ else
+ {
+ getLogger().debug(
+ "Skipping consumer " + consumer.getName() + " for file "
+ + basefile.getRelativePath() );
+ }
+ }
+ }
+ }
+
+    /**
+     * DirectoryWalker callback: the walk has completed; record the finish timestamp.
+     */
+    public void directoryWalkFinished()
+    {
+        getLogger().info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getBasedir() );
+        stats.timestampFinished = System.currentTimeMillis();
+    }
+
+    /**
+     * Determine whether a consumer should be offered the given file.
+     *
+     * Exclusion patterns are checked first and win over inclusions; a path matching
+     * neither list is excluded by default.
+     *
+     * @param consumer the consumer whose include/exclude patterns are consulted
+     * @param relativePath the file path relative to the repository basedir
+     * @return true if the consumer wants this file, false otherwise
+     */
+    private boolean wantsFile( Consumer consumer, String relativePath )
+    {
+        Iterator it;
+
+        // Test excludes first.
+        it = consumer.getExcludePatterns().iterator();
+        while ( it.hasNext() )
+        {
+            String pattern = (String) it.next();
+            if ( SelectorUtils.matchPath( pattern, relativePath, isCaseSensitive ) )
+            {
+                // Definitely does NOT WANT FILE.
+                return false;
+            }
+        }
+
+        // Now test includes.
+        it = consumer.getIncludePatterns().iterator();
+        while ( it.hasNext() )
+        {
+            String pattern = (String) it.next();
+            if ( SelectorUtils.matchPath( pattern, relativePath, isCaseSensitive ) )
+            {
+                // Specifically WANTS FILE.
+                return true;
+            }
+        }
+
+        // Not included, and Not excluded? Default to EXCLUDE.
+        return false;
+    }
+
+    /**
+     * @return the cutoff timestamp; files last modified before this are skipped during a walk
+     */
+    public long getOnlyModifiedAfterTimestamp()
+    {
+        return onlyModifiedAfterTimestamp;
+    }
+
+    /**
+     * Set the cutoff timestamp used to implement incremental scanning.
+     *
+     * @param onlyModifiedAfterTimestamp files whose lastModified is older than this are skipped
+     */
+    public void setOnlyModifiedAfterTimestamp( long onlyModifiedAfterTimestamp )
+    {
+        this.onlyModifiedAfterTimestamp = onlyModifiedAfterTimestamp;
+    }
+
+    /**
+     * Debug method from DirectoryWalker.
+     */
+    public void debug( String message )
+    {
+        getLogger().debug( "Repository Scanner: " + message );
+    }
+
+    /**
+     * @return the logger this scanner reports through, as injected via {@link #setLogger(Logger)}
+     */
+    public Logger getLogger()
+    {
+        return logger;
+    }
+
+    /**
+     * @param logger the logger this scanner should report through
+     */
+    public void setLogger( Logger logger )
+    {
+        this.logger = logger;
+    }
+}
+++ /dev/null
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-/**
- * Filter that accepts all.
- */
-public class AcceptAllArtifactFilter
- implements ArtifactFilter
-{
- public boolean include( Artifact artifact )
- {
- return true;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Filter that accepts all.
- */
-public class AcceptAllMetadataFilter
- implements MetadataFilter
-{
- public boolean include( RepositoryMetadata metadata, long timestamp )
- {
- return true;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Ability to filter repository metadata lists.
- *
- * @todo should be in maven-artifact
- */
-public interface MetadataFilter
-{
- /**
- * Whether to include this metadata in the filtered list.
- *
- * @param metadata the metadata
- * @param timestamp the time to compare against - it will be included if it doesn't exist or is outdated
- * @return whether to include it
- */
- boolean include( RepositoryMetadata metadata, long timestamp );
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-/**
- * A filter to remove snapshot artifacts during discovery.
- */
-public class SnapshotArtifactFilter
- implements ArtifactFilter
-{
- public boolean include( Artifact artifact )
- {
- return !artifact.isSnapshot();
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-
-/**
- * @author Edwin Punzalan
- */
-public abstract class AbstractArtifactDiscovererTest
- extends PlexusTestCase
-{
- protected ArtifactDiscoverer discoverer;
-
- private ArtifactFactory factory;
-
- protected ArtifactRepository repository;
-
- protected abstract String getLayout();
-
- protected abstract File getRepositoryFile();
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, getLayout() );
-
- factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- repository = getRepository();
- }
-
- protected ArtifactRepository getRepository()
- throws Exception
- {
- File basedir = getRepositoryFile();
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout =
- (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, getLayout() );
-
- return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version )
- {
- Artifact artifact = factory.createArtifact( groupId, artifactId, version, null, "jar" );
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version, String type )
- {
- return factory.createArtifact( groupId, artifactId, version, null, type );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version, String type,
- String classifier )
- {
- return factory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+
+/**
+ * Base class for Discoverer tests: looks up the Discoverer component and
+ * provides factory methods for the test repositories under src/test.
+ *
+ * @author Edwin Punzalan
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
+ */
+public abstract class AbstractDiscovererTestCase
+    extends PlexusTestCase
+{
+    // The component under test, looked up fresh for each test in setUp().
+    protected Discoverer discoverer;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        discoverer = (Discoverer) lookup( Discoverer.ROLE );
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        // Release the component back to the container before the container shuts down.
+        release( discoverer );
+        super.tearDown();
+    }
+
+    /**
+     * @return a repository over src/test/legacy-repository using the "legacy" layout
+     */
+    protected ArtifactRepository getLegacyRepository()
+        throws Exception
+    {
+        File repoBaseDir = new File( getBasedir(), "src/test/legacy-repository" );
+        ArtifactRepository repository = createRepository( repoBaseDir, "legacy" );
+        resetRepositoryState( repository );
+        return repository;
+    }
+
+    /**
+     * @return a repository over src/test/repository using the "default" layout
+     */
+    protected ArtifactRepository getDefaultRepository()
+        throws Exception
+    {
+        File repoBaseDir = new File( getBasedir(), "src/test/repository" );
+        ArtifactRepository repository = createRepository( repoBaseDir, "default" );
+        resetRepositoryState( repository );
+        return repository;
+    }
+
+    /**
+     * Hook for subclasses to clean any persistent state (e.g. scan timestamps)
+     * out of the test repository before a test runs. Default implementation does nothing.
+     */
+    protected void resetRepositoryState( ArtifactRepository repository )
+    {
+        // Implement any kind of repository cleanup.
+    }
+
+    /**
+     * Create an ArtifactRepository over the given directory with the named layout.
+     *
+     * @param basedir the repository root directory
+     * @param layout the repository layout role-hint, e.g. "default" or "legacy"
+     */
+    protected ArtifactRepository createRepository( File basedir, String layout )
+        throws Exception
+    {
+        ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+        ArtifactRepositoryLayout repoLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, layout );
+
+        return factory.createArtifactRepository( "discoveryRepo-" + getName(), "file://" + basedir, repoLayout, null,
+                                                 null );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - added to allow IDE users to pull all tests into their tool.
+ *
+ * NOTE: keep the suite in sync as test classes are added to this package.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.discoverer" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( DefaultDiscovererTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the default artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:DefaultArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class DefaultArtifactDiscovererTest
- extends AbstractArtifactDiscovererTest
-{
- private static final List JAVAX_BLACKLIST = Collections.singletonList( "javax/**" );
-
- protected String getLayout()
- {
- return "default";
- }
-
- protected File getRepositoryFile()
- {
- return getTestFile( "src/test/repository" );
- }
-
- public void testDefaultExcludes()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- boolean b = path.indexOf( "CVS" ) >= 0;
- if ( b )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
- assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
- }
- }
-
- public void testStandardExcludes()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "KEYS".equals( path ) )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
- }
- }
-
- public void testBlacklistedExclude()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithBlacklist( JAVAX_BLACKLIST );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "javax/sql/jdbc/2.0/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
- dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- }
-
- public void testKickoutWithShortPath()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path is too short to build an artifact from",
- dPath.getComment() );
-
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongArtifactId()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar".equals(
- path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not correspond to an artifact",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not wrong jar",
- "wrong-artifactId-1.0-20050611.112233-1.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoType()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1/invalid-1".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not have an extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1'", "invalid-1".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0/invalid-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Built artifact version does not match path version",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-2.0.jar'", "invalid-2.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithLongerVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0/invalid-1.0b.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path version does not corresspond to an artifact version",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0b.jar'", "invalid-1.0b.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongSnapshotVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0.jar'", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithSnapshotBaseVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar".equals(
- path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Built snapshot artifact base version does not match path version: invalid:invalid:jar:1.0-SNAPSHOT:runtime; should have been version: 1.0-20050611.123456-1",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0-20050611-123456-1.jar'",
- "invalid-1.0-20050611.123456-1.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
- }
-
- public void testArtifactWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
- }
-
- public void testJavaSourcesInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains(
- createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
- }
-
- public void testTestSourcesInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains(
- createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources" ) ) );
- }
-
- public void testDistributionInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check zip included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
- assertTrue( "Check tar.gz included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
- }
-
- public void testSnapshotInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertTrue( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testSnapshotInclusionWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check snapshot included", artifacts.contains(
- createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ) ) );
- }
-
- public void testSnapshotExclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertFalse( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) );
- }
-
- public void testFileSet()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check file is set", artifact.getFile() );
- }
- }
-
- public void testRepositorySet()
- throws MalformedURLException, DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- String url = repository.getUrl();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check repository set", artifact.getRepository() );
- assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
- }
- }
-
- public void testStandalonePoms()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
-
- // cull down to actual artifacts (only standalone poms will have type = pom)
- Map keyedArtifacts = new HashMap();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- String key = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
- if ( !"pom".equals( a.getType() ) || !keyedArtifacts.containsKey( key ) )
- {
- keyedArtifacts.put( key, a );
- }
- }
-
- List models = new ArrayList();
-
- for ( Iterator i = keyedArtifacts.values().iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
-
- if ( "pom".equals( a.getType() ) )
- {
- models.add( a );
- }
- }
-
- assertEquals( 4, models.size() );
-
- // Define order we expect
- Collections.sort( models );
-
- Iterator itr = models.iterator();
- Artifact model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "B", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "B", model.getArtifactId() );
- assertEquals( "2.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "discovery", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.testgroup", model.getGroupId() );
- assertEquals( "discovery", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- }
-
- public void testShortPath()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid-1.0.jar" );
-
- fail( "Artifact should be null for short paths" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongArtifactId()
- throws ComponentLookupException
- {
-
- try
- {
- discoverer.buildArtifact( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
-
- fail( "Artifact should be null for wrong ArtifactId" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoType()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
- fail( "Artifact should be null for no type" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0/invalid-2.0.jar" );
-
- fail( "Artifact should be null for wrong version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testLongVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0/invalid-1.0b.jar" );
-
- fail( "Artifact should be null for long version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongSnapshotVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
-
- fail( "Artifact should be null for wrong snapshot version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testSnapshotBaseVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
-
- fail( "Artifact should be null for snapshot base version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testPathWithClassifier()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ), artifact );
- }
-
- public void testWithJavaSourceInclusion()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/testing/1.0/testing-1.0-sources.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ), artifact );
- }
-
- public void testDistributionArtifacts()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/testing/1.0/testing-1.0.tar.gz";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ), artifact );
-
- testPath = "org/apache/maven/testing/1.0/testing-1.0.zip";
-
- artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ), artifact );
- }
-
- public void testSnapshot()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ), artifact );
-
- testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar";
-
- artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ), artifact );
- }
-
- public void testNormal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax/sql/jdbc/2.0/jdbc-2.0.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
- }
-
- public void testSnapshotWithClassifier()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ),
- artifact );
- }
-
- private List discoverArtifactsWithSnapshots()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() );
- }
-
- private List discoverArtifactsWithBlacklist( List list )
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, list, new SnapshotArtifactFilter() );
- }
-
- private List discoverArtifacts()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discoverer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.Logger;
+import org.codehaus.plexus.logging.console.ConsoleLogger;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * DefaultDiscovererTest
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class DefaultDiscovererTest
+ extends AbstractDiscovererTestCase
+{
+ private MockConsumer createAndAddMockConsumer( List consumers, String includePattern, String excludePattern )
+ {
+ MockConsumer mockConsumer = new MockConsumer();
+ mockConsumer.getIncludePatterns().add( includePattern );
+ if ( StringUtils.isNotBlank( excludePattern ) )
+ {
+ mockConsumer.getExcludePatterns().add( excludePattern );
+ }
+ consumers.add( mockConsumer );
+ return mockConsumer;
+ }
+
+ private void assertFilesProcessed( int expectedFileCount, DiscovererStatistics stats, MockConsumer mockConsumer )
+ {
+ assertNotNull( "Stats should not be null.", stats );
+ assertNotNull( "MockConsumer should not be null.", mockConsumer );
+ assertNotNull( "MockConsumer.filesProcessed should not be null.", mockConsumer.getFilesProcessed() );
+
+ if ( stats.getFilesConsumed() != mockConsumer.getFilesProcessed().size() )
+ {
+ fail( "Somehow, the stats count of files consumed, and the count of actual files "
+ + "processed by the consumer do not match." );
+ }
+
+ int actualFileCount = mockConsumer.getFilesProcessed().size();
+
+ if ( expectedFileCount != actualFileCount )
+ {
+ stats.dump( new ConsoleLogger( Logger.LEVEL_DEBUG, "test" ) );
+ System.out.println( "Base Dir:" + stats.getRepository().getBasedir() );
+ int num = 0;
+ Iterator it = mockConsumer.getFilesProcessed().iterator();
+ while ( it.hasNext() )
+ {
+ BaseFile file = (BaseFile) it.next();
+ System.out.println( " Processed File [" + num + "]: " + file.getRelativePath() );
+ num++;
+ }
+
+ fail( "Files Processed mismatch: expected:<" + expectedFileCount + ">, actual:<" + actualFileCount + ">" );
+ }
+ }
+
+ public void testLegacyLayoutRepositoryAll()
+ throws Exception
+ {
+ ArtifactRepository repository = getLegacyRepository();
+ List consumers = new ArrayList();
+ MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*", null );
+
+ DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true );
+
+ assertNotNull( stats );
+
+ assertFilesProcessed( 16, stats, mockConsumer );
+ }
+
+ public void testDefaultLayoutRepositoryAll()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+ List consumers = new ArrayList();
+ MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*", null );
+
+ DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true );
+
+ assertNotNull( stats );
+
+ assertFilesProcessed( 42, stats, mockConsumer );
+ }
+
+ public void testDefaultLayoutRepositoryPomsOnly()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+ List consumers = new ArrayList();
+ MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.pom", null );
+
+ DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true );
+
+ assertNotNull( stats );
+
+ assertFilesProcessed( 10, stats, mockConsumer );
+ }
+
+ public void testDefaultLayoutRepositoryJarsOnly()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+ List consumers = new ArrayList();
+ MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null );
+
+ DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true );
+
+ assertNotNull( stats );
+
+ assertFilesProcessed( 17, stats, mockConsumer );
+ }
+
+ public void testDefaultLayoutRepositoryJarsNoSnapshots()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+ List consumers = new ArrayList();
+ MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null );
+
+ DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, false );
+
+ assertNotNull( stats );
+
+ assertFilesProcessed( 13, stats, mockConsumer );
+ }
+
+ public void testDefaultLayoutRepositoryJarsNoSnapshotsWithExclusions()
+ throws Exception
+ {
+ ArtifactRepository repository = getDefaultRepository();
+ List consumers = new ArrayList();
+ MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null );
+
+ List exclusions = new ArrayList();
+ exclusions.add( "**/*-client.jar" );
+ DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, false, 0, exclusions, null );
+
+ assertNotNull( stats );
+
+ assertFilesProcessed( 12, stats, mockConsumer );
+ }
+}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * This class tests the DefaultMetadataDiscoverer class.
- */
-public class DefaultMetadataDiscovererTest
- extends PlexusTestCase
-{
- private MetadataDiscoverer discoverer;
-
- private static final String TEST_OPERATION = "test";
-
- private ArtifactRepository repository;
-
- private ArtifactFactory factory;
-
- /**
- *
- */
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- discoverer = (MetadataDiscoverer) lookup( MetadataDiscoverer.ROLE, "default" );
-
- factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- repository = getRepository();
-
- removeTimestampMetadata();
- }
-
- protected ArtifactRepository getRepository()
- throws Exception
- {
- File basedir = getTestFile( "src/test/repository" );
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
- }
-
- /**
- *
- */
- public void tearDown()
- throws Exception
- {
- super.tearDown();
- discoverer = null;
- }
-
- /**
- * Test if metadata file in wrong directory was added to the kickedOutPaths.
- */
- public void testKickoutWrongDirectory()
- throws DiscovererException
- {
- discoverer.discoverMetadata( repository, null );
- Iterator iter = discoverer.getKickedOutPathsIterator();
- boolean found = false;
- while ( iter.hasNext() && !found )
- {
- DiscovererPath dPath = (DiscovererPath) iter.next();
- String dir = dPath.getPath();
-
- String normalizedDir = dir.replace( '\\', '/' );
- if ( "javax/maven-metadata.xml".equals( normalizedDir ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Unable to build a repository metadata from path",
- dPath.getComment() );
- }
- }
- assertTrue( found );
- }
-
- /**
- * Test if blank metadata file was added to the kickedOutPaths.
- */
- public void testKickoutBlankMetadata()
- throws DiscovererException
- {
- discoverer.discoverMetadata( repository, null );
- Iterator iter = discoverer.getKickedOutPathsIterator();
- boolean found = false;
- while ( iter.hasNext() && !found )
- {
- DiscovererPath dPath = (DiscovererPath) iter.next();
- String dir = dPath.getPath();
-
- String normalizedDir = dir.replace( '\\', '/' );
- if ( "org/apache/maven/some-ejb/1.0/maven-metadata.xml".equals( normalizedDir ) )
- {
- found = true;
- assertTrue( "Check reason for kickout", dPath.getComment().matches(
- "Error reading metadata file '(.*)': input contained no data" ) );
- }
- }
- assertTrue( found );
- }
-
- private void removeTimestampMetadata()
- throws IOException
- {
- // remove the metadata that tracks time
- File file = new File( repository.getBasedir(), "maven-metadata.xml" );
- System.gc(); // for Windows
- file.delete();
- assertFalse( file.exists() );
- }
-
- public void testDiscoverMetadata()
- throws DiscovererException
- {
- List metadataPaths = discoverer.discoverMetadata( repository, null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- metadata =
- new SnapshotArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery", "1.0" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- metadata = new GroupRepositoryMetadata( "org.apache.maven" );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId )
- {
- return createArtifact( groupId, artifactId, "1.0" );
- }
-
- private Artifact createArtifact( String groupId, String artifactId, String version )
- {
- return factory.createArtifact( groupId, artifactId, version, null, "jar" );
- }
-
- private boolean containsMetadata( List metadataPaths, RepositoryMetadata metadata )
- {
- for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
- {
- RepositoryMetadata m = (RepositoryMetadata) i.next();
-
- if ( m.getGroupId().equals( metadata.getGroupId() ) )
- {
- if ( m.getArtifactId() == null && metadata.getArtifactId() == null )
- {
- return true;
- }
- else if ( m.getArtifactId() != null && m.getArtifactId().equals( metadata.getArtifactId() ) )
- {
- return true;
- }
- }
- }
- return false;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Test the legacy artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id:LegacyArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $
- */
-public class LegacyArtifactDiscovererTest
- extends AbstractArtifactDiscovererTest
-{
- private static final List JAVAX_SQL_BLACKLIST = Collections.singletonList( "javax.sql/**" );
-
- protected String getLayout()
- {
- return "legacy";
- }
-
- protected File getRepositoryFile()
- {
- return getTestFile( "src/test/legacy-repository" );
- }
-
- public void testDefaultExcludes()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( path.indexOf( "CVS" ) >= 0 )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 );
- assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
- }
- }
-
- public void testStandardExcludes()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "KEYS".equals( path ) )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
- }
- }
-
- public void testBlacklistedExclude()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithBlacklist();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "javax.sql/jars/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
- dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- }
-
- public void testKickoutWithShortPath()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Path does not match a legacy repository path for an artifact", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithLongPath()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/1.0/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Path does not match a legacy repository path for an artifact", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithInvalidType()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/foo/invalid-1.0.foo".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path artifact type does not corresspond to an artifact type",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.foo", "invalid-1.0.foo".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoExtension()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/no-extension".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not have an extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'no-extension'", "no-extension".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongExtension()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/invalid-1.0.rar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path type does not match the extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0.rar'", "invalid-1.0.rar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/invalid.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename version is empty", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid.jar'", "invalid.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
- }
-
- public void testTextualVersion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "UNKNOWN" ) ) );
- }
-
- public void testArtifactWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
- }
-
- public void testJavaSourcesInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains(
- createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
- }
-
- public void testDistributionInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check zip included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
- assertTrue( "Check tar.gz included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
- }
-
- public void testSnapshotInclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertTrue( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testSnapshotExclusion()
- throws DiscovererException
- {
- List artifacts = discoverArtifacts();
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertFalse( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testFileSet()
- throws DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check file is set", artifact.getFile() );
- }
- }
-
- public void testRepositorySet()
- throws MalformedURLException, DiscovererException
- {
- List artifacts = discoverArtifactsWithSnapshots();
- assertNotNull( "Check artifacts not null", artifacts );
-
- String url = repository.getUrl();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check repository set", artifact.getRepository() );
- assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
- }
- }
-
- public void testWrongArtifactPackaging()
- throws ComponentLookupException, DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
-
- fail( "Artifact should be null for wrong package extension" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoArtifactId()
- throws DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "groupId/jars/-1.0.jar" );
-
- fail( "Artifact should be null when artifactId is missing" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
-
- try
- {
- discoverer.buildArtifact( "groupId/jars/1.0.jar" );
-
- fail( "Artifact should be null when artifactId is missing" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoType()
- throws ComponentLookupException, DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
- fail( "Artifact should be null for no type" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testSnapshot()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT" ), artifact );
- }
-
- public void testFinal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606" ), artifact );
- }
-
- public void testNormal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax.sql/jars/jdbc-2.0.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
- }
-
- public void testJavadoc()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc" ), artifact );
- }
-
- public void testSources()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax.sql/java-sources/jdbc-2.0-sources.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources" ), artifact );
- }
-
- public void testPlugin()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "maven/plugins/maven-test-plugin-1.8.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "maven", "maven-test-plugin", "1.8", "plugin" ), artifact );
- }
-
-
- private List discoverArtifacts()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() );
- }
-
- private List discoverArtifactsWithBlacklist()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, JAVAX_SQL_BLACKLIST, new SnapshotArtifactFilter() );
- }
-
- private List discoverArtifactsWithSnapshots()
- throws DiscovererException
- {
- return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() );
- }
-}
--- /dev/null
+/**
+ *
+ */
+package org.apache.maven.archiva.discoverer;
+
+import org.apache.maven.archiva.common.consumers.Consumer;
+import org.apache.maven.archiva.common.consumers.ConsumerException;
+import org.apache.maven.archiva.common.utils.BaseFile;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class MockConsumer
+ implements Consumer
+{
+ private List excludePatterns = new ArrayList();
+
+ private List includePatterns = new ArrayList();
+
+ private List filesProcessed = new ArrayList();
+
+ private int countFileProblems = 0;
+
+ public String getName()
+ {
+ return "MockConsumer (Testing Only)";
+ }
+
+ public boolean init( ArtifactRepository repository )
+ {
+ return true;
+ }
+
+ public void processFile( BaseFile file )
+ throws ConsumerException
+ {
+ filesProcessed.add( file );
+ }
+
+ public void processFileProblem( BaseFile file, String message )
+ {
+ countFileProblems++;
+ }
+
+ public List getExcludePatterns()
+ {
+ return excludePatterns;
+ }
+
+ public void setExcludePatterns( List excludePatterns )
+ {
+ this.excludePatterns = excludePatterns;
+ }
+
+ public List getIncludePatterns()
+ {
+ return includePatterns;
+ }
+
+ public void setIncludePatterns( List includePatterns )
+ {
+ this.includePatterns = includePatterns;
+ }
+
+ public int getCountFileProblems()
+ {
+ return countFileProblems;
+ }
+
+ public List getFilesProcessed()
+ {
+ return filesProcessed;
+ }
+}
\ No newline at end of file
~ under the License.
-->
+<!-- This metadata is intentionally wrong. -->
<metadata>
<groupId>javax.sql</groupId>
<artifactId>jdbc</artifactId>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>archiva-indexer</artifactId>
- <name>Archiva Repository Indexer</name>
+ <name>Archiva Indexer</name>
<dependencies>
<dependency>
<groupId>org.apache.maven</groupId>
import org.apache.maven.archiva.indexer.query.Query;
import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
import java.util.Collection;
import java.util.List;
Collection getAllRecordKeys()
throws RepositoryIndexException;
+ /**
+     * Indexes the specified artifact. If the artifact is already in the repository, it is updated.
+     * This method should use less memory than indexRecords as the records can be created and disposed of on the fly.
+     *
+     * @param artifact the artifact to index
+     * @param factory the factory used to convert the artifact into an index record
+ * @throws RepositoryIndexException if there is a problem indexing the artifacts
+ */
+ void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+ throws RepositoryIndexException;
+
/**
* Indexes the artifacts found within the specified list. If the artifacts are already in the
* repository they are updated. This method should use less memory than indexRecords as the records can be
lastUpdatedTime = System.currentTimeMillis();
}
}
+
+    /**
+     * Indexes the given artifact, replacing any existing index entry that has
+     * the same primary key (i.e. an add-or-update operation).
+     *
+     * @param artifact the artifact to index
+     * @param factory the factory used to convert the artifact into an index record
+     * @throws RepositoryIndexException if the Lucene index cannot be updated
+     */
+    public void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory )
+        throws RepositoryIndexException
+    {
+        IndexModifier indexModifier = null;
+        try
+        {
+            // Open the index, creating it first if it does not yet exist.
+            indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() );
+
+            RepositoryIndexRecord record = factory.createRecord( artifact );
+
+            if ( record != null )
+            {
+                // Delete any previous document with this primary key so the
+                // add below acts as an update rather than creating a duplicate.
+                Term term = new Term( FLD_PK, record.getPrimaryKey() );
+
+                indexModifier.deleteDocuments( term );
+
+                Document document = converter.convert( record );
+                document.add( new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+
+                indexModifier.addDocument( document );
+            }
+            indexModifier.optimize();
+        }
+        catch ( IOException e )
+        {
+            throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e );
+        }
+        finally
+        {
+            // Always release the index and record when it was last touched.
+            closeQuietly( indexModifier );
+            lastUpdatedTime = System.currentTimeMillis();
+        }
+    }
public List getAllGroupIds()
throws RepositoryIndexException
<dependencies>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-discoverer</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-artifact</artifactId>
+ <artifactId>archiva-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.wagon</groupId>
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.common.artifact.builder.BuilderException;
+import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
* @plexus.requirement
*/
private ArtifactFactory factory;
-
+
/**
* @plexus.requirement role-hint="default"
- * @todo use a map, and have priorities in them
+ * @todo use a map, and have priorities in them.
*/
- private ArtifactDiscoverer defaultArtifactDiscoverer;
-
+ private LayoutArtifactBuilder defaultArtifactBuilder;
+
/**
* @plexus.requirement role-hint="legacy"
*/
- private ArtifactDiscoverer legacyArtifactDiscoverer;
+ private LayoutArtifactBuilder legacyArtifactBuilder;
/**
* @plexus.requirement role="org.apache.maven.wagon.Wagon"
Artifact artifact = null;
try
{
- artifact = defaultArtifactDiscoverer.buildArtifact( artifactPath );
+ artifact = defaultArtifactBuilder.build( artifactPath );
getLogger().debug( "Artifact requested is: " + artifact );
}
- catch ( DiscovererException e )
+ catch ( BuilderException e )
{
msg = "Failed to build artifact from path:\n\tfrom default: " + e.getMessage();
}
{
try
{
- artifact = legacyArtifactDiscoverer.buildArtifact( artifactPath );
+ artifact = legacyArtifactBuilder.build( artifactPath );
getLogger().debug( "Artifact requested is: " + artifact );
}
- catch ( DiscovererException e )
+ catch ( BuilderException e )
{
getLogger().debug( msg + "\n\tfrom legacy: " + e.getMessage() );
}
<role>org.codehaus.plexus.logging.LoggerManager</role>
<implementation>org.codehaus.plexus.logging.console.ConsoleLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
-
<configuration>
<threshold>ERROR</threshold>
</configuration>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-jdo2</artifactId>
+ <version>1.0-alpha-8</version>
+ <exclusions>
+ <exclusion>
+ <groupId>xerces</groupId>
+ <artifactId>xercesImpl</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>xerces</groupId>
+ <artifactId>xmlParserAPIs</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>jpox</groupId>
+ <artifactId>jpox</artifactId>
+ <version>1.1.6</version>
+ <scope>compile</scope>
+ <exclusions>
+          <!-- not needed when targeting JDK 1.4 -->
+ <exclusion>
+ <groupId>javax.sql</groupId>
+ <artifactId>jdbc-stdext</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <!-- TEST DEPS -->
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.7.3.3</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
<plugins>
<groupId>org.codehaus.modello</groupId>
<artifactId>modello-maven-plugin</artifactId>
<version>1.0-alpha-14-SNAPSHOT</version>
+ <configuration>
+ <version>1.0.0</version>
+ <packageWithVersion>false</packageWithVersion>
+ <model>src/main/mdo/reporting.mdo</model>
+ </configuration>
<executions>
<execution>
+ <id>modello-java</id>
<goals>
- <goal>xpp3-writer</goal>
<goal>java</goal>
+ <goal>jpox-metadata-class</goal>
+ <!--
+ <goal>xpp3-writer</goal>
<goal>xpp3-reader</goal>
+ -->
+ </goals>
+ </execution>
+ <execution>
+ <id>jpox-jdo-mapping</id>
+ <goals>
+ <goal>jpox-jdo-mapping</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${basedir}/target/classes/org/apache/maven/archiva/reporting/model/</outputDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>jpox-maven-plugin</artifactId>
+ <version>1.1.6-SNAPSHOT</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>enhance</goal>
</goals>
</execution>
</executions>
- <configuration>
- <version>1.0.0</version>
- <model>src/main/mdo/reporting.mdo</model>
- </configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ReportingException
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ReportingException
+    extends Exception
+{
+
+    /** Creates a ReportingException with neither message nor cause. */
+    public ReportingException()
+    {
+    }
+
+    /**
+     * @param message the detail message
+     */
+    public ReportingException( String message )
+    {
+        super( message );
+    }
+
+    /**
+     * @param cause the underlying cause of this exception
+     */
+    public ReportingException( Throwable cause )
+    {
+        super( cause );
+    }
+
+    /**
+     * @param message the detail message
+     * @param cause the underlying cause of this exception
+     */
+    public ReportingException( String message, Throwable cause )
+    {
+        super( message, cause );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
+
+import java.util.List;
+
+import javax.jdo.Extent;
+import javax.jdo.JDOException;
+import javax.jdo.JDOHelper;
+import javax.jdo.JDOObjectNotFoundException;
+import javax.jdo.JDOUserException;
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
+import javax.jdo.Query;
+import javax.jdo.Transaction;
+
+/**
+ * AbstractJdoResults - Base class for all JDO related results.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractJdoDatabase
+    implements Initializable
+{
+    /**
+     * @plexus.requirement role-hint="archiva"
+     */
+    private JdoFactory jdoFactory;
+
+    private PersistenceManagerFactory pmf;
+
+    // -------------------------------------------------------------------
+    // JPOX / JDO Specifics.
+    // -------------------------------------------------------------------
+
+    /**
+     * Fetches all detached instances of the given persistent class.
+     *
+     * @param clazz the persistent class to fetch
+     * @param ordering a JDOQL ordering expression, or null for no ordering
+     * @return the list of detached objects
+     */
+    protected List getAllObjects( Class clazz, String ordering )
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            Extent extent = pm.getExtent( clazz, true );
+
+            Query query = pm.newQuery( extent );
+
+            if ( ordering != null )
+            {
+                query.setOrdering( ordering );
+            }
+
+            List result = (List) query.execute();
+
+            // Detach so callers may use the objects outside the transaction.
+            result = (List) pm.detachCopyAll( result );
+
+            tx.commit();
+
+            return result;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    /**
+     * Looks up a single detached object by its application-identity key.
+     *
+     * @param clazz the persistent class to look up
+     * @param key the object key; must not be null
+     * @return the detached object
+     * @throws JDOObjectNotFoundException if no object exists for the key
+     * @throws JDOException if the key is null or the lookup fails
+     */
+    protected Object getObjectByKey( Class clazz, Object key )
+        throws JDOObjectNotFoundException, JDOException
+    {
+        if ( key == null )
+        {
+            throw new JDOException( "Unable to get object from jdo using null key." );
+        }
+
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            Object objectId = pm.newObjectIdInstance( clazz, key.toString() );
+
+            Object object = pm.getObjectById( objectId );
+
+            object = pm.detachCopy( object );
+
+            tx.commit();
+
+            return object;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    public void initialize()
+        throws InitializationException
+    {
+        pmf = jdoFactory.getPersistenceManagerFactory();
+    }
+
+    /**
+     * Deletes the given (detached) object from the store.
+     */
+    protected void removeObject( Object o )
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            // Re-attach by id before deleting; o itself is a detached copy.
+            o = pm.getObjectById( pm.getObjectId( o ) );
+
+            pm.deletePersistent( o );
+
+            tx.commit();
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    protected Object saveObject( Object object )
+    {
+        return saveObject( object, null );
+    }
+
+    /**
+     * Persists (or updates) the given object and returns a detached copy.
+     *
+     * @param object the object to save; if already persistent it must be detached
+     * @param fetchGroups fetch groups to activate before detaching, or null
+     * @return the detached, saved object
+     * @throws JDOException if an already-persistent object is not detached
+     */
+    protected Object saveObject( Object object, String fetchGroups[] )
+        throws JDOException
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            if ( ( JDOHelper.getObjectId( object ) != null ) && !JDOHelper.isDetached( object ) )
+            {
+                throw new JDOException( "Existing object is not detached: " + object );
+            }
+
+            if ( fetchGroups != null )
+            {
+                // FIXED: condition was 'i >= fetchGroups.length', so the loop
+                // never ran and the requested fetch groups were silently ignored.
+                for ( int i = 0; i < fetchGroups.length; i++ )
+                {
+                    pm.getFetchPlan().addGroup( fetchGroups[i] );
+                }
+            }
+
+            pm.makePersistent( object );
+
+            object = pm.detachCopy( object );
+
+            tx.commit();
+
+            return object;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    protected PersistenceManager getPersistenceManager()
+    {
+        PersistenceManager pm = pmf.getPersistenceManager();
+
+        // Unlimited fetch depth: detach complete object graphs.
+        pm.getFetchPlan().setMaxFetchDepth( -1 );
+
+        return pm;
+    }
+
+    protected static void closePersistenceManager( PersistenceManager pm )
+    {
+        try
+        {
+            pm.close();
+        }
+        catch ( JDOUserException e )
+        {
+            // ignored - the manager is already closed or unusable; nothing to recover.
+        }
+    }
+
+    /**
+     * Rolls back the transaction if it is still active, then closes its
+     * PersistenceManager. Safe to call after a successful commit.
+     */
+    protected static void rollbackIfActive( Transaction tx )
+    {
+        PersistenceManager pm = tx.getPersistenceManager();
+
+        try
+        {
+            if ( tx.isActive() )
+            {
+                tx.rollback();
+            }
+        }
+        finally
+        {
+            closePersistenceManager( pm );
+        }
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.model.Result;
+
+/**
+ * AbstractResultsDatabase
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public abstract class AbstractResultsDatabase
+    extends AbstractJdoDatabase
+{
+    /**
+     * <p>
+     * Get the number of failures in the database.
+     * </p>
+     *
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     *
+     * @return the number of failures in the database.
+     */
+    public abstract int getNumFailures();
+
+    /**
+     * <p>
+     * Get the number of warnings in the database.
+     * </p>
+     *
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     *
+     * @return the number of warnings in the database.
+     */
+    public abstract int getNumWarnings();
+
+    /**
+     * <p>
+     * Get the number of notices in the database.
+     * </p>
+     *
+     * <p>
+     * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+     * </p>
+     *
+     * @return the number of notices in the database.
+     */
+    public abstract int getNumNotices();
+
+    /**
+     * Builds a new, not yet persisted, {@link Result} from the given details.
+     *
+     * @param processor the processor that produced the result
+     * @param problem the problem identifier
+     * @param reason the human-readable reason
+     * @return the populated Result (caller is responsible for persisting it)
+     */
+    protected static Result createResult( String processor, String problem, String reason )
+    {
+        Result result = new Result();
+        result.setProcessor( processor );
+        result.setProblem( problem );
+        result.setReason( reason );
+        return result;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.archiva.reporting.model.ArtifactResultsKey;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.Artifact;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.jdo.JDOObjectNotFoundException;
+import javax.jdo.PersistenceManager;
+import javax.jdo.Query;
+import javax.jdo.Transaction;
+
+/**
+ * ArtifactResultsDatabase - Database of ArtifactResults.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase"
+ */
+public class ArtifactResultsDatabase
+    extends AbstractResultsDatabase
+{
+    // -------------------------------------------------------------------
+    // ArtifactResults methods.
+    // -------------------------------------------------------------------
+
+    public static final String ROLE = ArtifactResultsDatabase.class.getName();
+
+    /**
+     * Records a failure against the artifact, ignoring exact duplicates.
+     */
+    public void addFailure( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getFailures().contains( result ) )
+        {
+            results.addFailure( result );
+        }
+
+        saveObject( results );
+    }
+
+    /**
+     * Records a notice against the artifact, ignoring exact duplicates.
+     */
+    public void addNotice( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getNotices().contains( result ) )
+        {
+            results.addNotice( result );
+        }
+
+        saveObject( results );
+    }
+
+    /**
+     * Records a warning against the artifact, ignoring exact duplicates.
+     */
+    public void addWarning( Artifact artifact, String processor, String problem, String reason )
+    {
+        ArtifactResults results = getArtifactResults( artifact );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getWarnings().contains( result ) )
+        {
+            results.addWarning( result );
+        }
+
+        saveObject( results );
+    }
+
+    /**
+     * Clears all failures, warnings, and notices from the given results and
+     * persists the now-empty container.
+     */
+    public void clearResults( ArtifactResults results )
+    {
+        results.getFailures().clear();
+        results.getWarnings().clear();
+        results.getNotices().clear();
+
+        saveObject( results );
+    }
+
+    public List getAllArtifactResults()
+    {
+        return getAllObjects( ArtifactResults.class, null );
+    }
+
+    /**
+     * @return an iterator over all {@link ArtifactResults}; never null.
+     */
+    public Iterator getIterator()
+    {
+        List allartifacts = getAllArtifactResults();
+        if ( allartifacts == null )
+        {
+            return Collections.EMPTY_LIST.iterator();
+        }
+
+        return allartifacts.iterator();
+    }
+
+    /**
+     * Finds all results matching the given artifact coordinates, ordered by
+     * artifact id.
+     */
+    public List findArtifactResults( String groupId, String artifactId, String version )
+    {
+        PersistenceManager pm = getPersistenceManager();
+        Transaction tx = pm.currentTransaction();
+
+        try
+        {
+            tx.begin();
+
+            Query query = pm.newQuery( "javax.jdo.query.JDOQL", "SELECT FROM " + ArtifactResults.class.getName()
+                + " WHERE groupId == findGroupId && " + " artifactId == findArtifactId && "
+                + " version == findVersionId" );
+            query.declareParameters( "String findGroupId, String findArtifactId, String findVersionId" );
+            // FIXED: the ordering must reference the candidate field 'artifactId';
+            // 'findArtifactId' is a declared query parameter and is constant for
+            // every row, so ordering by it performed no ordering at all.
+            query.setOrdering( "artifactId ascending" );
+
+            List result = (List) query.execute( groupId, artifactId, version );
+
+            result = (List) pm.detachCopyAll( result );
+
+            tx.commit();
+
+            return result;
+        }
+        finally
+        {
+            rollbackIfActive( tx );
+        }
+    }
+
+    public void remove( ArtifactResults results )
+    {
+        removeObject( results );
+    }
+
+    /**
+     * Removes the results for the given artifact, if any exist.
+     */
+    public void remove( Artifact artifact )
+    {
+        try
+        {
+            ArtifactResults results = lookupArtifactResults( artifact );
+            remove( results );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            // nothing to do - no results were stored for this artifact.
+        }
+    }
+
+    /**
+     * Get an {@link ArtifactResults} from the store.
+     * If the store does not have one, create it.
+     *
+     * Equivalent to calling {@link #lookupArtifactResults(Artifact)} then if
+     * not found, using {@link #createArtifactResults(Artifact)}.
+     *
+     * @param artifact the artifact information
+     * @return the ArtifactResults object (may not be in database yet, so don't forget to {@link #saveObject(Object)})
+     * @see #lookupArtifactResults(Artifact)
+     * @see #createArtifactResults(Artifact)
+     */
+    public ArtifactResults getArtifactResults( Artifact artifact )
+    {
+        ArtifactResults results;
+
+        try
+        {
+            results = lookupArtifactResults( artifact );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            results = createArtifactResults( artifact );
+        }
+
+        return results;
+    }
+
+    /**
+     * Create a new {@link ArtifactResults} object from the provided Artifact information.
+     *
+     * @param artifact the artifact information.
+     * @return the new {@link ArtifactResults} object.
+     * @see #getArtifactResults(Artifact)
+     * @see #lookupArtifactResults(Artifact)
+     */
+    private ArtifactResults createArtifactResults( Artifact artifact )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version,
+         * type, classifier.
+         * This also means that none of those fields may be null. However, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        ArtifactResults results = new ArtifactResults();
+        results.setGroupId( StringUtils.defaultString( artifact.getGroupId() ) );
+        results.setArtifactId( StringUtils.defaultString( artifact.getArtifactId() ) );
+        results.setVersion( StringUtils.defaultString( artifact.getVersion() ) );
+        results.setType( StringUtils.defaultString( artifact.getType() ) );
+        results.setClassifier( StringUtils.defaultString( artifact.getClassifier() ) );
+
+        return results;
+    }
+
+    /**
+     * Lookup the {@link ArtifactResults} in the JDO store from the information in
+     * the provided Artifact.
+     *
+     * @param artifact the artifact information.
+     * @return the previously saved {@link ArtifactResults} from the JDO store.
+     * @throws JDOObjectNotFoundException if the {@link ArtifactResults} are not found.
+     * @see #getArtifactResults(Artifact)
+     * @see #createArtifactResults(Artifact)
+     */
+    private ArtifactResults lookupArtifactResults( Artifact artifact )
+        throws JDOObjectNotFoundException
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version,
+         * type, classifier.
+         * This also means that none of those fields may be null. However, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        ArtifactResultsKey key = new ArtifactResultsKey();
+        key.groupId = StringUtils.defaultString( artifact.getGroupId() );
+        key.artifactId = StringUtils.defaultString( artifact.getArtifactId() );
+        key.version = StringUtils.defaultString( artifact.getVersion() );
+        key.type = StringUtils.defaultString( artifact.getType() );
+        key.classifier = StringUtils.defaultString( artifact.getClassifier() );
+
+        return (ArtifactResults) getObjectByKey( ArtifactResults.class, key );
+    }
+
+    public int getNumFailures()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getFailures().size();
+        }
+        return count;
+    }
+
+    public int getNumNotices()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getNotices().size();
+        }
+        return count;
+    }
+
+    public int getNumWarnings()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            ArtifactResults results = (ArtifactResults) it.next();
+            count += results.getWarnings().size();
+        }
+        return count;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.model.MetadataResultsKey;
+import org.apache.maven.archiva.reporting.model.Result;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.jdo.JDOObjectNotFoundException;
+
+/**
+ * MetadataResultsDatabase
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.MetadataResultsDatabase"
+ */
+public class MetadataResultsDatabase
+    extends AbstractResultsDatabase
+{
+    public static final String ROLE = MetadataResultsDatabase.class.getName();
+
+    /**
+     * Records a failure against the metadata, ignoring exact duplicates.
+     */
+    public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getFailures().contains( result ) )
+        {
+            results.addFailure( result );
+        }
+
+        saveObject( results );
+    }
+
+    /**
+     * Records a warning against the metadata, ignoring exact duplicates.
+     */
+    public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getWarnings().contains( result ) )
+        {
+            results.addWarning( result );
+        }
+
+        saveObject( results );
+    }
+
+    /**
+     * Records a notice against the metadata, ignoring exact duplicates.
+     */
+    public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
+    {
+        MetadataResults results = getMetadataResults( metadata );
+        Result result = createResult( processor, problem, reason );
+
+        if ( !results.getNotices().contains( result ) )
+        {
+            results.addNotice( result );
+        }
+
+        saveObject( results );
+    }
+
+    /**
+     * Clears all failures, warnings, and notices from the given results and
+     * persists the now-empty container.
+     */
+    public void clearResults( MetadataResults results )
+    {
+        results.getFailures().clear();
+        results.getWarnings().clear();
+        results.getNotices().clear();
+
+        saveObject( results );
+    }
+
+    public List getAllMetadataResults()
+    {
+        return getAllObjects( MetadataResults.class, null );
+    }
+
+    /**
+     * @return an iterator over all {@link MetadataResults}; never null.
+     */
+    public Iterator getIterator()
+    {
+        List allmetadatas = getAllMetadataResults();
+        if ( allmetadatas == null )
+        {
+            return Collections.EMPTY_LIST.iterator();
+        }
+
+        return allmetadatas.iterator();
+    }
+
+    public void remove( MetadataResults results )
+    {
+        removeObject( results );
+    }
+
+    /**
+     * Removes the results for the given metadata, if any exist.
+     */
+    public void remove( RepositoryMetadata metadata )
+    {
+        try
+        {
+            MetadataResults results = lookupMetadataResults( metadata );
+            remove( results );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            // nothing to do - no results were stored for this metadata.
+        }
+    }
+
+    /**
+     * Gets the {@link MetadataResults} for the given metadata from the store,
+     * creating a new (unsaved) one if none exists yet.
+     */
+    public MetadataResults getMetadataResults( RepositoryMetadata metadata )
+    {
+        MetadataResults results;
+
+        try
+        {
+            results = lookupMetadataResults( metadata );
+        }
+        catch ( JDOObjectNotFoundException e )
+        {
+            results = createMetadataResults( metadata );
+        }
+
+        return results;
+    }
+
+    /**
+     * Creates a new, not yet persisted, {@link MetadataResults} from the metadata.
+     */
+    private MetadataResults createMetadataResults( RepositoryMetadata metadata )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version.
+         * This also means that none of those fields may be null. However, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        MetadataResults results = new MetadataResults();
+        results.setGroupId( StringUtils.defaultString( metadata.getGroupId() ) );
+        results.setArtifactId( StringUtils.defaultString( metadata.getArtifactId() ) );
+        results.setVersion( StringUtils.defaultString( metadata.getBaseVersion() ) );
+
+        return results;
+    }
+
+    /**
+     * Looks up the previously saved {@link MetadataResults} for the metadata.
+     *
+     * @throws JDOObjectNotFoundException if no results exist for the metadata.
+     */
+    private MetadataResults lookupMetadataResults( RepositoryMetadata metadata )
+    {
+        /* The funky StringUtils.defaultString() is used because of database constraints.
+         * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version.
+         * This also means that none of those fields may be null. However, that doesn't eliminate the
+         * ability to have an empty string in place of a null.
+         */
+
+        MetadataResultsKey key = new MetadataResultsKey();
+        key.groupId = StringUtils.defaultString( metadata.getGroupId(), "" );
+        key.artifactId = StringUtils.defaultString( metadata.getArtifactId(), "" );
+        key.version = StringUtils.defaultString( metadata.getBaseVersion(), "" );
+
+        return (MetadataResults) getObjectByKey( MetadataResults.class, key );
+    }
+
+    public int getNumFailures()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getFailures().size();
+        }
+        return count;
+    }
+
+    public int getNumNotices()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getNotices().size();
+        }
+        return count;
+    }
+
+    public int getNumWarnings()
+    {
+        int count = 0;
+        for ( Iterator it = getIterator(); it.hasNext(); )
+        {
+            MetadataResults results = (MetadataResults) it.next();
+            count += results.getWarnings().size();
+        }
+        return count;
+    }
+}
* under the License.
*/
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.Result;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.Date;
-import java.util.HashMap;
import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.Map;
-import java.util.Set;
/**
+ * The Main Reporting Database.
+ *
* @todo i18n, including message formatting and parameterisation
+ * @plexus.component role="org.apache.maven.archiva.reporting.database.ReportingDatabase"
*/
public class ReportingDatabase
{
- private final Reporting reporting;
-
- private Map artifactMap;
-
- private Map metadataMap;
-
- private int numFailures;
-
- private int numWarnings;
-
- private ArtifactRepository repository;
-
- private boolean inProgress;
-
- private long startTime;
-
- private final ReportGroup reportGroup;
-
- private Set metadataWithProblems;
-
- private Map filteredDatabases = new HashMap();
-
- private int numNotices;
-
- public ReportingDatabase( ReportGroup reportGroup )
- {
- this( reportGroup, new Reporting() );
- }
-
- public ReportingDatabase( ReportGroup reportGroup, Reporting reporting )
- {
- this( reportGroup, reporting, null );
- }
-
- public ReportingDatabase( ReportGroup reportGroup, ArtifactRepository repository )
- {
- this( reportGroup, new Reporting(), repository );
- }
-
- public ReportingDatabase( ReportGroup reportGroup, Reporting reporting, ArtifactRepository repository )
- {
- this.reportGroup = reportGroup;
-
- this.reporting = reporting;
-
- this.repository = repository;
-
- initArtifactMap();
-
- initMetadataMap();
- }
-
- public void addFailure( Artifact artifact, String processor, String problem, String reason )
- {
- ArtifactResults results = getArtifactResults( artifact );
- Result result = createResult( processor, problem, reason );
- if ( !results.getFailures().contains( result ) )
- {
- results.addFailure( result );
- numFailures++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addFailure( artifact, processor, problem, reason );
- }
- }
-
- public void addNotice( Artifact artifact, String processor, String problem, String reason )
- {
- ArtifactResults results = getArtifactResults( artifact );
- Result result = createResult( processor, problem, reason );
- if ( !results.getNotices().contains( result ) )
- {
- results.addNotice( result );
- numNotices++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addNotice( artifact, processor, problem, reason );
- }
- }
-
- public void addWarning( Artifact artifact, String processor, String problem, String reason )
- {
- ArtifactResults results = getArtifactResults( artifact );
- Result result = createResult( processor, problem, reason );
- if ( !results.getWarnings().contains( result ) )
- {
- results.addWarning( result );
- numWarnings++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addWarning( artifact, processor, problem, reason );
- }
- }
-
- ArtifactResults getArtifactResults( Artifact artifact )
- {
- return getArtifactResults( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
- artifact.getType(), artifact.getClassifier() );
- }
-
- private ArtifactResults getArtifactResults( String groupId, String artifactId, String version, String type,
- String classifier )
- {
- Map artifactMap = this.artifactMap;
-
- String key = getArtifactKey( groupId, artifactId, version, type, classifier );
- ArtifactResults results = (ArtifactResults) artifactMap.get( key );
- if ( results == null )
- {
- results = new ArtifactResults();
- results.setArtifactId( artifactId );
- results.setClassifier( classifier );
- results.setGroupId( groupId );
- results.setType( type );
- results.setVersion( version );
-
- artifactMap.put( key, results );
- reporting.getArtifacts().add( results );
- }
-
- return results;
- }
-
- private void initArtifactMap()
- {
- Map map = new HashMap();
- for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
- {
- ArtifactResults result = (ArtifactResults) i.next();
-
- String key = getArtifactKey( result.getGroupId(), result.getArtifactId(), result.getVersion(),
- result.getType(), result.getClassifier() );
- map.put( key, result );
-
- numFailures += result.getFailures().size();
- numWarnings += result.getWarnings().size();
- numNotices += result.getNotices().size();
- }
- artifactMap = map;
- }
-
- private static String getArtifactKey( String groupId, String artifactId, String version, String type,
- String classifier )
- {
- return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier;
- }
-
- private static Result createResult( String processor, String problem, String reason )
- {
- Result result = new Result();
- result.setProcessor( processor );
- result.setProblem( problem );
- result.setReason( reason );
- return result;
- }
-
- public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason )
- {
- MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
- if ( !metadataWithProblems.contains( results ) )
- {
- metadataWithProblems.add( results );
- }
- Result result = createResult( processor, problem, reason );
- if ( !results.getFailures().contains( result ) )
- {
- results.addFailure( result );
- numFailures++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addFailure( metadata, processor, problem, reason );
- }
- }
-
- public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason )
- {
- MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
- if ( !metadataWithProblems.contains( results ) )
- {
- metadataWithProblems.add( results );
- }
- Result result = createResult( processor, problem, reason );
- if ( !results.getWarnings().contains( result ) )
- {
- results.addWarning( result );
- numWarnings++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addWarning( metadata, processor, problem, reason );
- }
- }
-
- public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason )
- {
- MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() );
- if ( !metadataWithProblems.contains( results ) )
- {
- metadataWithProblems.add( results );
- }
- Result result = createResult( processor, problem, reason );
- if ( !results.getNotices().contains( result ) )
- {
- results.addNotice( result );
- numNotices++;
- }
- updateTimings();
-
- if ( filteredDatabases.containsKey( problem ) )
- {
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem );
-
- reportingDatabase.addNotice( metadata, processor, problem, reason );
- }
- }
-
- public Set getMetadataWithProblems()
- {
- return metadataWithProblems;
- }
-
- private void initMetadataMap()
- {
- Map map = new HashMap();
- Set problems = new LinkedHashSet();
-
- for ( Iterator i = reporting.getMetadata().iterator(); i.hasNext(); )
- {
- MetadataResults result = (MetadataResults) i.next();
-
- String key = getMetadataKey( result.getGroupId(), result.getArtifactId(), result.getVersion() );
-
- map.put( key, result );
-
- numFailures += result.getFailures().size();
- numWarnings += result.getWarnings().size();
- numNotices += result.getNotices().size();
-
- if ( !result.getFailures().isEmpty() || !result.getWarnings().isEmpty() || !result.getNotices().isEmpty() )
- {
- problems.add( result );
- }
- }
- metadataMap = map;
- metadataWithProblems = problems;
- }
+ public static final String ROLE = ReportingDatabase.class.getName();
- private static String getMetadataKey( String groupId, String artifactId, String version )
- {
- return groupId + ":" + artifactId + ":" + version;
- }
-
- public int getNumFailures()
- {
- return numFailures;
- }
-
- public int getNumWarnings()
- {
- return numWarnings;
- }
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase artifactDatabase;
- public Reporting getReporting()
- {
- return reporting;
- }
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResultsDatabase metadataDatabase;
public Iterator getArtifactIterator()
{
- return reporting.getArtifacts().iterator();
+ return artifactDatabase.getIterator();
}
public Iterator getMetadataIterator()
{
- return reporting.getMetadata().iterator();
+ return metadataDatabase.getIterator();
}
- public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp )
+ public void clear()
{
- String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() );
- Map map = metadataMap;
- MetadataResults results = (MetadataResults) map.get( key );
- return results != null && results.getLastModified() >= timestamp;
}
/**
- * Make sure the metadata record exists, but remove any previous reports in preparation for adding new ones.
- *
- * @param metadata the metadata
- * @param lastModified the modification time of the file being tracked
+ * <p>
+ * Get the number of failures in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of failures in the database.
*/
- public void cleanMetadata( RepositoryMetadata metadata, long lastModified )
- {
- MetadataResults results = getMetadataResults( metadata, lastModified );
-
- results.setLastModified( lastModified );
-
- numFailures -= results.getFailures().size();
- results.getFailures().clear();
-
- numWarnings -= results.getWarnings().size();
- results.getWarnings().clear();
-
- numNotices -= results.getWarnings().size();
- results.getNotices().clear();
-
- metadataWithProblems.remove( results );
- }
-
- MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified )
- {
- return getMetadataResults( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion(),
- lastModified );
- }
-
- private MetadataResults getMetadataResults( String groupId, String artifactId, String baseVersion,
- long lastModified )
- {
- String key = getMetadataKey( groupId, artifactId, baseVersion );
- Map metadataMap = this.metadataMap;
- MetadataResults results = (MetadataResults) metadataMap.get( key );
- if ( results == null )
- {
- results = new MetadataResults();
- results.setArtifactId( artifactId );
- results.setGroupId( groupId );
- results.setVersion( baseVersion );
- results.setLastModified( lastModified );
-
- metadataMap.put( key, results );
- reporting.getMetadata().add( results );
- }
- return results;
- }
-
- public void removeArtifact( Artifact artifact )
- {
- Map map = artifactMap;
-
- String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
- artifact.getType(), artifact.getClassifier() );
- ArtifactResults results = (ArtifactResults) map.get( key );
- if ( results != null )
- {
- for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); )
- {
- if ( results.equals( i.next() ) )
- {
- i.remove();
- }
- }
-
- numFailures -= results.getFailures().size();
- numWarnings -= results.getWarnings().size();
- numNotices -= results.getNotices().size();
-
- map.remove( key );
- }
- }
-
- public ArtifactRepository getRepository()
- {
- return repository;
- }
-
- public boolean isInProgress()
- {
- return inProgress;
- }
-
- public void setInProgress( boolean inProgress )
- {
- this.inProgress = inProgress;
-
- if ( inProgress )
- {
- startTime = System.currentTimeMillis();
- }
- }
-
- public void clear()
- {
- // clear the values rather than destroy the instance so that the "inProgress" indicator is in tact.
- numWarnings = 0;
- numNotices = 0;
- numFailures = 0;
-
- artifactMap.clear();
- metadataMap.clear();
- metadataWithProblems.clear();
- filteredDatabases.clear();
-
- reporting.getArtifacts().clear();
- reporting.getMetadata().clear();
-
- updateTimings();
- }
-
- public void setStartTime( long startTime )
- {
- this.startTime = startTime;
- }
-
- public long getStartTime()
- {
- return startTime;
- }
-
- public void updateTimings()
- {
- long startTime = getStartTime();
- Date endTime = new Date();
- if ( startTime > 0 )
- {
- getReporting().setExecutionTime( endTime.getTime() - startTime );
- }
- getReporting().setLastModified( endTime.getTime() );
- }
-
- public ReportGroup getReportGroup()
+ public int getNumFailures()
{
- return reportGroup;
+ int count = 0;
+ count += artifactDatabase.getNumFailures();
+ count += metadataDatabase.getNumFailures();
+ return count;
}
- public ReportingDatabase getFilteredDatabase( String filter )
+ /**
+ * <p>
+ * Get the number of notices in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of notices in the database.
+ */
+ public int getNumNotices()
{
- ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( filter );
-
- if ( reportingDatabase == null )
- {
- reportingDatabase = new ReportingDatabase( reportGroup, repository );
-
- Reporting reporting = reportingDatabase.getReporting();
- reporting.setExecutionTime( this.reporting.getExecutionTime() );
- reporting.setLastModified( this.reporting.getLastModified() );
-
- for ( Iterator i = this.reporting.getArtifacts().iterator(); i.hasNext(); )
- {
- ArtifactResults results = (ArtifactResults) i.next();
- ArtifactResults targetResults = null;
- for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createArtifactResults( reportingDatabase, results );
- }
-
- targetResults.addFailure( result );
- reportingDatabase.numFailures++;
- }
- }
- for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createArtifactResults( reportingDatabase, results );
- }
-
- targetResults.addWarning( result );
- reportingDatabase.numWarnings++;
- }
- }
- for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createArtifactResults( reportingDatabase, results );
- }
-
- targetResults.addNotice( result );
- reportingDatabase.numNotices++;
- }
- }
- }
- for ( Iterator i = this.reporting.getMetadata().iterator(); i.hasNext(); )
- {
- MetadataResults results = (MetadataResults) i.next();
- MetadataResults targetResults = null;
- for ( Iterator j = results.getFailures().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createMetadataResults( reportingDatabase, results );
- }
-
- targetResults.addFailure( result );
- reportingDatabase.numFailures++;
- }
- }
- for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createMetadataResults( reportingDatabase, results );
- }
-
- targetResults.addWarning( result );
- reportingDatabase.numWarnings++;
- }
- }
- for ( Iterator j = results.getNotices().iterator(); j.hasNext(); )
- {
- Result result = (Result) j.next();
-
- if ( filter.equals( result.getProcessor() ) )
- {
- if ( targetResults == null )
- {
- // lazily create so it is not added unless it has to be
- targetResults = createMetadataResults( reportingDatabase, results );
- }
-
- targetResults.addNotice( result );
- reportingDatabase.numNotices++;
- }
- }
- }
-
- filteredDatabases.put( filter, reportingDatabase );
- }
-
- return reportingDatabase;
+ int count = 0;
+ count += artifactDatabase.getNumNotices();
+ count += metadataDatabase.getNumNotices();
+ return count;
}
- private static MetadataResults createMetadataResults( ReportingDatabase reportingDatabase, MetadataResults results )
+ /**
+ * <p>
+ * Get the number of warnings in the database.
+ * </p>
+ *
+ * <p>
+ * <b>WARNING:</b> This is a very resource intensive request. Use sparingly.
+ * </p>
+ *
+ * @return the number of warnings in the database.
+ */
+ public int getNumWarnings()
{
- MetadataResults targetResults = reportingDatabase.getMetadataResults( results.getGroupId(),
- results.getArtifactId(),
- results.getVersion(),
- results.getLastModified() );
- reportingDatabase.metadataWithProblems.add( targetResults );
- return targetResults;
+ int count = 0;
+ count += artifactDatabase.getNumWarnings();
+ count += metadataDatabase.getNumWarnings();
+ return count;
}
- private static ArtifactResults createArtifactResults( ReportingDatabase reportingDatabase, ArtifactResults results )
+ public ArtifactResultsDatabase getArtifactDatabase()
{
- return reportingDatabase.getArtifactResults( results.getGroupId(), results.getArtifactId(),
- results.getVersion(), results.getType(), results.getClassifier() );
+ return artifactDatabase;
}
- public int getNumNotices()
+ public MetadataResultsDatabase getMetadataDatabase()
{
- return numNotices;
+ return metadataDatabase;
}
}
+++ /dev/null
-package org.apache.maven.archiva.reporting.executor;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.MetadataDiscoverer;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStore;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.InvalidArtifactRTException;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
-import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-import org.apache.maven.model.Model;
-import org.apache.maven.project.MavenProject;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.apache.maven.project.ProjectBuildingException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Report executor implementation.
- *
- * @todo should the report set be limitable by configuration?
- * @plexus.component
- */
-public class DefaultReportExecutor
- extends AbstractLogEnabled
- implements ReportExecutor
-{
- /**
- * @plexus.requirement
- */
- private MavenProjectBuilder projectBuilder;
-
- /**
- * @plexus.requirement
- */
- private ReportingStore reportingStore;
-
- /**
- * @plexus.requirement
- */
- private ArtifactFactory artifactFactory;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer"
- */
- private Map artifactDiscoverers;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer"
- */
- private Map metadataDiscoverers;
-
- private static final int ARTIFACT_BUFFER_SIZE = 1000;
-
- public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
- throws ReportingStoreException
- {
- ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
- for ( Iterator i = metadata.iterator(); i.hasNext(); )
- {
- RepositoryMetadata repositoryMetadata = (RepositoryMetadata) i.next();
-
- File file =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
- reporter.cleanMetadata( repositoryMetadata, file.lastModified() );
-
- reportGroup.processMetadata( repositoryMetadata, repository, reporter );
- }
-
- reportingStore.storeReports( reporter, repository );
- }
-
- public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
- throws ReportingStoreException
- {
- ReportingDatabase reporter = getReportDatabase( repository, reportGroup );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
-
- Model model = null;
- try
- {
- Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
- artifact.getArtifactId(),
- artifact.getVersion() );
- MavenProject project =
- projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository );
-
- model = project.getModel();
- }
- catch ( InvalidArtifactRTException e )
- {
- reporter.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e );
- }
- catch ( ProjectBuildingException e )
- {
- reporter.addWarning( artifact, null, null, "Error reading project model: " + e );
- }
-
- reporter.removeArtifact( artifact );
-
- reportGroup.processArtifact( artifact, model, reporter );
- }
-
- reportingStore.storeReports( reporter, repository );
- }
-
- public ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
- throws ReportingStoreException
- {
- getLogger().debug(
- "Reading previous report database " + reportGroup.getName() + " from repository " + repository.getId() );
- return reportingStore.getReportsFromStore( repository, reportGroup );
- }
-
- public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
- ArtifactFilter filter )
- throws DiscovererException, ReportingStoreException
- {
- // Flush (as in toilet, not store) the report database
- ReportingDatabase database = getReportDatabase( repository, reportGroup );
- database.clear();
-
- // Discovery process
- String layoutProperty = getRepositoryLayout( repository.getLayout() );
- ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
-
- // Save some memory by not tracking paths we won't use
- // TODO: Plexus CDC should be able to inject this configuration
- discoverer.setTrackOmittedPaths( false );
-
- List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
-
- if ( !artifacts.isEmpty() )
- {
- getLogger().info( "Discovered " + artifacts.size() + " artifacts" );
-
- // Work through these in batches, then flush the project cache.
- for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE )
- {
- int end = j + ARTIFACT_BUFFER_SIZE;
- List currentArtifacts = artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end );
-
- // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal)
-
- // run the reports.
- runArtifactReports( reportGroup, currentArtifacts, repository );
-
- // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack
- // around that. TODO: remove when it is configurable
- flushProjectBuilderCacheHack();
- }
- }
-
- MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty );
- List metadata =
- metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() );
-
- if ( !metadata.isEmpty() )
- {
- getLogger().info( "Discovered " + metadata.size() + " metadata files" );
-
- // run the reports
- runMetadataReports( reportGroup, metadata, repository );
- }
- }
-
- private String getRepositoryLayout( ArtifactRepositoryLayout layout )
- {
- // gross limitation that there is no reverse lookup of the hint for the layout.
- if ( layout.getClass().equals( DefaultRepositoryLayout.class ) )
- {
- return "default";
- }
- else if ( layout.getClass().equals( LegacyRepositoryLayout.class ) )
- {
- return "legacy";
- }
- else
- {
- throw new IllegalArgumentException( "Unknown layout: " + layout );
- }
- }
-
- private void flushProjectBuilderCacheHack()
- {
- try
- {
- if ( projectBuilder != null )
- {
- java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" );
- f.setAccessible( true );
- Map cache = (Map) f.get( projectBuilder );
- cache.clear();
-
- f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" );
- f.setAccessible( true );
- cache = (Map) f.get( projectBuilder );
- cache.clear();
- }
- }
- catch ( NoSuchFieldException e )
- {
- throw new RuntimeException( e );
- }
- catch ( IllegalAccessException e )
- {
- throw new RuntimeException( e );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting.executor;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-
-import java.util.List;
-
-/**
- * Executes a report or report group.
- */
-public interface ReportExecutor
-{
- /**
- * Plexus component role name.
- */
- String ROLE = ReportExecutor.class.getName();
-
- /**
- * Run reports on a set of metadata.
- *
- * @param reportGroup the report set to run
- * @param metadata the RepositoryMetadata objects to report on
- * @param repository the repository that they come from
- * @throws org.apache.maven.archiva.reporting.store.ReportingStoreException
- * if there is a problem reading/writing the report database
- */
- public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository )
- throws ReportingStoreException;
-
- /**
- * Run reports on a set of artifacts.
- *
- * @param reportGroup the report set to run
- * @param artifacts the Artifact objects to report on
- * @param repository the repository that they come from
- * @throws ReportingStoreException if there is a problem reading/writing the report database
- */
- public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository )
- throws ReportingStoreException;
-
- /**
- * Get the report database in use for a given repository.
- *
- * @param repository the repository
- * @param reportGroup the report set to run
- * @return the report database
- * @throws ReportingStoreException if there is a problem reading the report database
- */
- ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup )
- throws ReportingStoreException;
-
- /**
- * Run the artifact and metadata reports for the repository. The artifacts and metadata will be discovered.
- *
- * @param repository the repository to run from
- * @param blacklistedPatterns the patterns to exclude during discovery
- * @param filter the filter to use during discovery to get a consistent list of artifacts
- * @param reportGroup the report set to run
- * @throws ReportingStoreException if there is a problem reading/writing the report database
- * @throws org.apache.maven.archiva.discoverer.DiscovererException
- * if there is a problem finding the artifacts and metadata to report on
- */
- public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns,
- ArtifactFilter filter )
- throws DiscovererException, ReportingStoreException;
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting.filter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.discoverer.filter.MetadataFilter;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * Implementation of a reporting filter. Artifacts already in the database are ignored.
- */
-public class ReportingMetadataFilter
- implements MetadataFilter
-{
- private final ReportingDatabase reporter;
-
- public ReportingMetadataFilter( ReportingDatabase reporter )
- {
- this.reporter = reporter;
- }
-
- public boolean include( RepositoryMetadata metadata, long timestamp )
- {
- return !reporter.isMetadataUpToDate( metadata, timestamp );
- }
-}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
import org.apache.maven.artifact.Artifact;
*/
private Map metadataReports;
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase )
+ public void processArtifact( Artifact artifact, Model model )
{
for ( Iterator i = artifactReports.entrySet().iterator(); i.hasNext(); )
{
{
ArtifactReportProcessor report = (ArtifactReportProcessor) entry.getValue();
- report.processArtifact( artifact, model, reportingDatabase );
+ report.processArtifact( artifact, model );
}
}
}
- public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
- ReportingDatabase reportingDatabase )
+ public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository )
{
for ( Iterator i = metadataReports.entrySet().iterator(); i.hasNext(); )
{
{
MetadataReportProcessor report = (MetadataReportProcessor) entry.getValue();
- report.processMetadata( repositoryMetadata, repository, reportingDatabase );
+ report.processMetadata( repositoryMetadata, repository );
}
}
}
/**
* The default report set, for repository health.
*
- * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" role-hint="health"
+ * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup"
+ * role-hint="health"
* @todo could these report groups be assembled dynamically by configuration rather than as explicit components? eg, reportGroup.addReport( ARP ), reportGroup.addReport( MRP )
*/
public class DefaultReportGroup
{
return "Repository Health";
}
-
- public String getFilename()
- {
- return "health-report.xml";
- }
}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
*
* @param artifact the artifact to process
* @param model the POM associated with the artifact to process
- * @param reportingDatabase the report database to store results in
*/
- void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase );
+ void processArtifact( Artifact artifact, Model model );
/**
* Run any metadata related reports in the report set.
*
* @param repositoryMetadata the metadata to process
* @param repository the repository the metadata is located in
- * @param reportingDatabase the report database to store results in
*/
- void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository,
- ReportingDatabase reportingDatabase );
+ void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository );
/**
* Whether a report with the given role hint is included in this report set.
* @return the report name
*/
String getName();
-
- /**
- * Get the filename of the reports within the repository's reports directory.
- *
- * @return the filename
- */
- String getFilename();
}
--- /dev/null
+package org.apache.maven.archiva.reporting.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.Serializable;
+
+/**
+ * ArtifactResultsKey - used by jpox for application identity for the {@link ArtifactResults} object and table.
+ *
+ * Value object: two keys are equal when all five coordinates
+ * (groupId, artifactId, version, type, classifier) are equal.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArtifactResultsKey
+    implements Serializable
+{
+    // Fields are public (not bean-style) as required of a jpox/JDO
+    // application-identity key class.
+    public String groupId = "";
+
+    public String artifactId = "";
+
+    public String version = "";
+
+    public String type = "";
+
+    public String classifier = "";
+
+    public ArtifactResultsKey()
+    {
+        /* do nothing */
+    }
+
+    /**
+     * Create a key from its string form "groupId:artifactId:version:type:classifier".
+     * Empty segments are preserved; the key must contain all five segments.
+     *
+     * @param key the colon separated key string
+     */
+    public ArtifactResultsKey( String key )
+    {
+        String[] segments = StringUtils.splitPreserveAllTokens( key, ':' );
+        groupId = segments[0];
+        artifactId = segments[1];
+        version = segments[2];
+        type = segments[3];
+        classifier = segments[4];
+    }
+
+    /**
+     * @return the colon separated string form consumed by {@link #ArtifactResultsKey(String)}
+     */
+    public String toString()
+    {
+        return StringUtils.join( new String[] { groupId, artifactId, version, type, classifier }, ':' );
+    }
+
+    public int hashCode()
+    {
+        final int PRIME = 31;
+        String[] fields = new String[] { groupId, artifactId, version, type, classifier };
+        int hash = 1;
+        for ( int i = 0; i < fields.length; i++ )
+        {
+            hash = PRIME * hash + ( ( fields[i] == null ) ? 0 : fields[i].hashCode() );
+        }
+        return hash;
+    }
+
+    public boolean equals( Object obj )
+    {
+        if ( this == obj )
+        {
+            return true;
+        }
+
+        if ( ( obj == null ) || ( getClass() != obj.getClass() ) )
+        {
+            return false;
+        }
+
+        ArtifactResultsKey that = (ArtifactResultsKey) obj;
+
+        return sameField( groupId, that.groupId ) && sameField( artifactId, that.artifactId )
+            && sameField( version, that.version ) && sameField( type, that.type )
+            && sameField( classifier, that.classifier );
+    }
+
+    /* Null-safe string comparison used by equals(). */
+    private static boolean sameField( String left, String right )
+    {
+        return ( left == null ) ? ( right == null ) : left.equals( right );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+import java.io.Serializable;
+
+/**
+ * MetadataResultsKey - used by jpox for application identity for the {@link MetadataResults} object and table.
+ *
+ * Value object: two keys are equal when groupId, artifactId and version are all equal.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MetadataResultsKey
+    implements Serializable
+{
+    // Fields are public (not bean-style) as required of a jpox/JDO
+    // application-identity key class.
+    public String groupId = "";
+
+    public String artifactId = "";
+
+    public String version = "";
+
+    public MetadataResultsKey()
+    {
+        /* do nothing */
+    }
+
+    /**
+     * Create a key from its string form "groupId:artifactId:version".
+     * Empty segments are preserved; the key must contain all three segments.
+     *
+     * @param key the colon separated key string
+     */
+    public MetadataResultsKey( String key )
+    {
+        String parts[] = StringUtils.splitPreserveAllTokens( key, ':' );
+        groupId = parts[0];
+        artifactId = parts[1];
+        version = parts[2];
+    }
+
+    /**
+     * @return the colon separated string form consumed by {@link #MetadataResultsKey(String)}
+     */
+    public String toString()
+    {
+        return StringUtils.join( new String[] { groupId, artifactId, version }, ':' );
+    }
+
+    public int hashCode()
+    {
+        final int PRIME = 31;
+        int result = 1;
+        result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() );
+        result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() );
+        result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() );
+        return result;
+    }
+
+    public boolean equals( Object obj )
+    {
+        if ( this == obj )
+        {
+            return true;
+        }
+
+        if ( obj == null )
+        {
+            return false;
+        }
+
+        if ( getClass() != obj.getClass() )
+        {
+            return false;
+        }
+
+        // FIX: this previously cast obj to ArtifactResultsKey. Since the
+        // getClass() check above guarantees obj is a MetadataResultsKey,
+        // that cast threw ClassCastException on every comparison between
+        // two MetadataResultsKey instances, breaking the equals() contract
+        // required for application identity.
+        final MetadataResultsKey other = (MetadataResultsKey) obj;
+
+        if ( groupId == null )
+        {
+            if ( other.groupId != null )
+            {
+                return false;
+            }
+        }
+        else if ( !groupId.equals( other.groupId ) )
+        {
+            return false;
+        }
+
+        if ( artifactId == null )
+        {
+            if ( other.artifactId != null )
+            {
+                return false;
+            }
+        }
+        else if ( !artifactId.equals( other.artifactId ) )
+        {
+            return false;
+        }
+
+        if ( version == null )
+        {
+            if ( other.version != null )
+            {
+                return false;
+            }
+        }
+        else if ( !version.equals( other.version ) )
+        {
+            return false;
+        }
+
+        return true;
+    }
+}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.model.Model;
{
String ROLE = ArtifactReportProcessor.class.getName();
- void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter );
+ void processArtifact( Artifact artifact, Model model );
}
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.layer.RepositoryQueryLayer;
import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
*/
private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResultsDatabase database;
+
private static final String ROLE_HINT = "bad-metadata";
/**
* @param repository the repository where the metadata was encountered
* @param reporter the ReportingDatabase to receive processing results
*/
- public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
{
if ( metadata.storedInGroupDirectory() )
{
try
{
- checkPluginMetadata( metadata, repository, reporter );
+ checkPluginMetadata( metadata, repository );
}
catch ( IOException e )
{
- addWarning( reporter, metadata, null, "Error getting plugin artifact directories versions: " + e );
+ addWarning( metadata, null, "Error getting plugin artifact directories versions: " + e );
}
}
else
}
if ( !found )
{
- addFailure( reporter, metadata, "missing-last-updated",
- "Missing lastUpdated element inside the metadata." );
+ addFailure( metadata, "missing-last-updated", "Missing lastUpdated element inside the metadata." );
}
if ( metadata.storedInArtifactVersionDirectory() )
{
- checkSnapshotMetadata( metadata, repository, reporter );
+ checkSnapshotMetadata( metadata, repository );
}
else
{
- checkMetadataVersions( metadata, repository, reporter );
+ checkMetadataVersions( metadata, repository );
try
{
- checkRepositoryVersions( metadata, repository, reporter );
+ checkRepositoryVersions( metadata, repository );
}
catch ( IOException e )
{
String reason = "Error getting plugin artifact directories versions: " + e;
- addWarning( reporter, metadata, null, reason );
+ addWarning( metadata, null, reason );
}
}
}
}
- private static void addWarning( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
- String reason )
+ private void addWarning( RepositoryMetadata metadata, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addWarning( metadata, ROLE_HINT, problem, reason );
+ database.addWarning( metadata, ROLE_HINT, problem, reason );
}
/**
* @param repository the repository where the metadata was encountered
* @param reporter the ReportingDatabase to receive processing results
*/
- private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
throws IOException
{
- File metadataDir =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+ File metadataDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) )
+ .getParentFile();
List pluginDirs = getArtifactIdFiles( metadataDir );
Map prefixes = new HashMap();
String artifactId = plugin.getArtifactId();
if ( artifactId == null || artifactId.length() == 0 )
{
- addFailure( reporter, metadata, "missing-artifact-id:" + plugin.getPrefix(),
+ addFailure( metadata, "missing-artifact-id:" + plugin.getPrefix(),
"Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() );
}
String prefix = plugin.getPrefix();
if ( prefix == null || prefix.length() == 0 )
{
- addFailure( reporter, metadata, "missing-plugin-prefix:" + artifactId,
+ addFailure( metadata, "missing-plugin-prefix:" + artifactId,
"Missing or empty plugin prefix for artifactId " + artifactId + "." );
}
else
{
if ( prefixes.containsKey( prefix ) )
{
- addFailure( reporter, metadata, "duplicate-plugin-prefix:" + prefix,
- "Duplicate plugin prefix found: " + prefix + "." );
+ addFailure( metadata, "duplicate-plugin-prefix:" + prefix, "Duplicate plugin prefix found: "
+ + prefix + "." );
}
else
{
File pluginDir = new File( metadataDir, artifactId );
if ( !pluginDirs.contains( pluginDir ) )
{
- addFailure( reporter, metadata, "missing-plugin-from-repository:" + artifactId,
- "Metadata plugin " + artifactId + " not found in the repository" );
+ addFailure( metadata, "missing-plugin-from-repository:" + artifactId, "Metadata plugin "
+ + artifactId + " not found in the repository" );
}
else
{
for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
{
File plugin = (File) plugins.next();
- addFailure( reporter, metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " +
- plugin.getName() + " is present in the repository but " + "missing in the metadata." );
+ addFailure( metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " + plugin.getName()
+ + " is present in the repository but " + "missing in the metadata." );
}
}
}
* @param repository the repository where the metadata was encountered
* @param reporter the ReportingDatabase to receive processing results
*/
- private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
{
- RepositoryQueryLayer repositoryQueryLayer =
- repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+ RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
Versioning versioning = metadata.getMetadata().getVersioning();
if ( versioning != null )
{
Snapshot snapshot = versioning.getSnapshot();
- String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION,
- snapshot.getTimestamp() + "-" + snapshot.getBuildNumber() );
- Artifact artifact =
- artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+ String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION, snapshot
+ .getTimestamp()
+ + "-" + snapshot.getBuildNumber() );
+ Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(),
+ version );
artifact.isSnapshot(); // trigger baseVersion correction
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- addFailure( reporter, metadata, "missing-snapshot-artifact-from-repository:" + version,
- "Snapshot artifact " + version + " does not exist." );
+ addFailure( metadata, "missing-snapshot-artifact-from-repository:" + version, "Snapshot artifact "
+ + version + " does not exist." );
}
}
}
* @param repository the repository where the metadata was encountered
* @param reporter the ReportingDatabase to receive processing results
*/
- private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository )
{
- RepositoryQueryLayer repositoryQueryLayer =
- repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+ RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
Versioning versioning = metadata.getMetadata().getVersioning();
if ( versioning != null )
{
String version = (String) versions.next();
- Artifact artifact =
- artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version );
+ Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata
+ .getArtifactId(), version );
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- addFailure( reporter, metadata, "missing-artifact-from-repository:" + version, "Artifact version " +
- version + " is present in metadata but " + "missing in the repository." );
+ addFailure( metadata, "missing-artifact-from-repository:" + version, "Artifact version " + version
+ + " is present in metadata but " + "missing in the repository." );
}
}
}
* @param reporter the ReportingDatabase to receive processing results
* @throws java.io.IOException if there is a problem reading from the file system
*/
- private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository )
throws IOException
{
Versioning versioning = metadata.getMetadata().getVersioning();
List metadataVersions = versioning != null ? versioning.getVersions() : Collections.EMPTY_LIST;
- File versionsDir =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+ File versionsDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) )
+ .getParentFile();
// TODO: I don't know how this condition can happen, but it was seen on the main repository.
// Avoid hard failure
String version = path.getParentFile().getName();
if ( !metadataVersions.contains( version ) )
{
- addFailure( reporter, metadata, "missing-artifact-from-metadata:" + version, "Artifact version " +
- version + " found in the repository but " + "missing in the metadata." );
+ addFailure( metadata, "missing-artifact-from-metadata:" + version, "Artifact version " + version
+ + " found in the repository but " + "missing in the metadata." );
}
}
}
else
{
- addFailure( reporter, metadata, null, "Metadata's directory did not exist: " + versionsDir );
+ addFailure( metadata, null, "Metadata's directory did not exist: " + versionsDir );
}
}
return artifactIdFiles;
}
- private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
- String reason )
+ private void addFailure( RepositoryMetadata metadata, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+ database.addFailure( metadata, ROLE_HINT, problem, reason );
}
}
*/
import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
* @plexus.requirement role-hint="md5"
*/
private Digester md5Digester;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
private static final String ROLE_HINT = "checksum";
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
// TODO: make md5 configurable
// verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
- verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
+ verifyChecksum( repository, path + ".sha1", file, sha1Digester, artifact );
}
private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
- ReportingDatabase reporter, Artifact artifact )
+ Artifact artifact )
{
File checksumFile = new File( repository.getBasedir(), path );
if ( checksumFile.exists() )
}
catch ( DigesterException e )
{
- addFailure( reporter, artifact, "checksum-wrong", e.getMessage() );
+ addFailure( artifact, "checksum-wrong", e.getMessage() );
}
catch ( IOException e )
{
- addFailure( reporter, artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
+ addFailure( artifact, "checksum-io-exception", "Read file error: " + e.getMessage() );
}
}
else
{
- addFailure( reporter, artifact, "checksum-missing",
+ addFailure( artifact, "checksum-missing",
digester.getAlgorithm() + " checksum file does not exist." );
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
}
*/
import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
import org.codehaus.plexus.digest.Digester;
*/
private Digester md5Digester;
+ /**
+ * @plexus.requirement
+ */
+ private MetadataResultsDatabase database;
+
private static final String ROLE_HINT = "checksum-metadata";
/**
* Validate the checksums of the metadata. Get the metadata file from the
* repository then validate the checksum.
*/
- public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ReportingDatabase reporter )
+ public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository )
{
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+ + "'. Only file based repositories are supported" );
}
//check if checksum files exist
String path = repository.pathOfRemoteRepositoryMetadata( metadata );
File file = new File( repository.getBasedir(), path );
- verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
- verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );
+ verifyChecksum( repository, path + ".md5", file, md5Digester, metadata );
+ verifyChecksum( repository, path + ".sha1", file, sha1Digester, metadata );
}
private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
- ReportingDatabase reporter, RepositoryMetadata metadata )
+ RepositoryMetadata metadata )
{
File checksumFile = new File( repository.getBasedir(), path );
if ( checksumFile.exists() )
}
catch ( DigesterException e )
{
- addFailure( reporter, metadata, "checksum-wrong", e.getMessage() );
+ addFailure( metadata, "checksum-wrong", e.getMessage() );
}
catch ( IOException e )
{
- addFailure( reporter, metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
+ addFailure( metadata, "checksum-io-exception", "Read file error: " + e.getMessage() );
}
}
else
{
- addFailure( reporter, metadata, "checksum-missing",
- digester.getAlgorithm() + " checksum file does not exist." );
+ addFailure( metadata, "checksum-missing", digester.getAlgorithm() + " checksum file does not exist." );
}
}
- private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem,
- String reason )
+ private void addFailure( RepositoryMetadata metadata, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( metadata, ROLE_HINT, problem, reason );
+ database.addFailure( metadata, ROLE_HINT, problem, reason );
}
}
import org.apache.maven.archiva.layer.RepositoryQueryLayer;
import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
*/
private RepositoryQueryLayerFactory layerFactory;
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
private static final String POM = "pom";
private static final String ROLE_HINT = "dependency";
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
RepositoryQueryLayer queryLayer = layerFactory.createRepositoryQueryLayer( artifact.getRepository() );
if ( !queryLayer.containsArtifact( artifact ) )
{
// TODO: is this even possible?
- addFailure( reporter, artifact, "missing-artifact", "Artifact does not exist in the repository" );
+ addFailure( artifact, "missing-artifact", "Artifact does not exist in the repository" );
}
if ( model != null && POM.equals( artifact.getType() ) )
{
List dependencies = model.getDependencies();
- processDependencies( dependencies, reporter, queryLayer, artifact );
+ processDependencies( dependencies, queryLayer, artifact );
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
- private void processDependencies( List dependencies, ReportingDatabase reporter,
- RepositoryQueryLayer repositoryQueryLayer, Artifact sourceArtifact )
+ private void processDependencies( List dependencies, RepositoryQueryLayer repositoryQueryLayer,
+ Artifact sourceArtifact )
{
if ( dependencies.size() > 0 )
{
if ( !repositoryQueryLayer.containsArtifact( artifact ) )
{
- String reason = MessageFormat.format(
- "Artifact''s dependency {0} does not exist in the repository",
- new String[]{getDependencyString( dependency )} );
- addFailure( reporter, sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ),
- reason );
+ String reason = MessageFormat
+ .format( "Artifact''s dependency {0} does not exist in the repository",
+ new String[] { getDependencyString( dependency ) } );
+ addFailure( sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ), reason );
}
}
catch ( InvalidVersionSpecificationException e )
{
String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}",
- new String[]{getDependencyString( dependency ),
- dependency.getVersion()} );
- addFailure( reporter, sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
+ new String[] {
+ getDependencyString( dependency ),
+ dependency.getVersion() } );
+ addFailure( sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason );
}
}
}
}
return artifactFactory.createDependencyArtifact( dependency.getGroupId(), dependency.getArtifactId(), spec,
- dependency.getType(), dependency.getClassifier(),
- dependency.getScope() );
+ dependency.getType(), dependency.getClassifier(), dependency
+ .getScope() );
}
}
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
*/
private String indexDirectory;
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
private static final String ROLE_HINT = "duplicate";
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( artifact.getFile() != null )
}
catch ( DigesterException e )
{
- addWarning( reporter, artifact, null,
- "Unable to generate checksum for " + artifact.getFile() + ": " + e );
+ addWarning( artifact, null, "Unable to generate checksum for " + artifact.getFile() + ": " + e );
}
if ( checksum != null )
{
try
{
- List results = index.search( new LuceneQuery(
- new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
+ List results = index
+ .search( new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum
+ .toLowerCase() ) ) ) );
if ( !results.isEmpty() )
{
String groupId = artifact.getGroupId();
if ( groupId.equals( result.getGroupId() ) )
{
- addFailure( reporter, artifact, "duplicate",
- "Found duplicate for " + artifact.getId() );
+ addFailure( artifact, "duplicate", "Found duplicate for " + artifact.getId() );
}
}
}
}
catch ( RepositoryIndexSearchException e )
{
- addWarning( reporter, artifact, null, "Failed to search in index" + e );
+ addWarning( artifact, null, "Failed to search in index" + e );
}
}
}
else
{
- addWarning( reporter, artifact, null, "Artifact file is null" );
+ addWarning( artifact, null, "Artifact file is null" );
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
- private static void addWarning( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addWarning( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addWarning( artifact, ROLE_HINT, problem, reason );
+ database.addWarning( artifact, ROLE_HINT, problem, reason );
}
}
*/
import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
{
private static final String ROLE_HINT = "invalid-pom";
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
/**
* @param artifact The pom xml file to be validated, passed as an artifact object.
* @param reporter The artifact reporter object.
*/
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+ + "'. Only file based repositories are supported" );
}
if ( "pom".equals( artifact.getType().toLowerCase() ) )
if ( !f.exists() )
{
- addFailure( reporter, artifact, "pom-missing", "POM not found." );
+ addFailure( artifact, "pom-missing", "POM not found." );
}
else
{
}
catch ( XmlPullParserException e )
{
- addFailure( reporter, artifact, "pom-parse-exception",
+ addFailure( artifact, "pom-parse-exception",
"The pom xml file is not well-formed. Error while parsing: " + e.getMessage() );
}
catch ( IOException e )
{
- addFailure( reporter, artifact, "pom-io-exception",
- "Error while reading the pom xml file: " + e.getMessage() );
+ addFailure( artifact, "pom-io-exception", "Error while reading the pom xml file: " + e.getMessage() );
}
finally
{
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
}
*/
import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.apache.maven.project.MavenProjectBuilder;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.File;
private ArtifactFactory artifactFactory;
// TODO: share with other code with the same
- private static final Set JAR_FILE_TYPES =
- new HashSet( Arrays.asList( new String[]{"jar", "war", "par", "ejb", "ear", "rar", "sar"} ) );
+ private static final Set JAR_FILE_TYPES = new HashSet( Arrays.asList( new String[] {
+ "jar",
+ "war",
+ "par",
+ "ejb",
+ "ear",
+ "rar",
+ "sar" } ) );
/**
* @plexus.requirement
*/
- private MavenProjectBuilder projectBuilder;
+ private ArtifactResultsDatabase database;
private static final String POM = "pom";
* location is valid based on the location specified in the pom. Check if the both the location
* specified in the file system pom and in the pom included in the package is the same.
*/
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+ + "'. Only file based repositories are supported" );
}
adjustDistributionArtifactHandler( artifact );
{
//check if the artifact is located in its proper location based on the info
//specified in the model object/pom
- Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(),
- model.getArtifactId(),
- model.getVersion(),
- artifact.getType(),
- artifact.getClassifier() );
+ Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(), model
+ .getArtifactId(), model.getVersion(), artifact.getType(), artifact.getClassifier() );
adjustDistributionArtifactHandler( modelArtifact );
String modelPath = repository.pathOf( modelArtifact );
if ( !modelPath.equals( artifactPath ) )
{
- addFailure( reporter, artifact, "repository-pom-location",
- "The artifact is out of place. It does not match the specified location in the repository pom: " +
- modelPath );
+ addFailure( artifact, "repository-pom-location",
+ "The artifact is out of place. It does not match the specified location in the repository pom: "
+ + modelPath );
}
}
}
{
//unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
//check if the pom is included in the package
- Model extractedModel = readArtifactModel( file, artifact, reporter );
+ Model extractedModel = readArtifactModel( file, artifact );
if ( extractedModel != null )
{
extractedModel.getPackaging() );
if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
{
- addFailure( reporter, artifact, "packaged-pom-location",
+ addFailure( artifact, "packaged-pom-location",
"The artifact is out of place. It does not match the specified location in the packaged pom." );
}
}
}
else
{
- addFailure( reporter, artifact, "missing-artifact",
- "The artifact file [" + file + "] cannot be found for metadata." );
+ addFailure( artifact, "missing-artifact", "The artifact file [" + file + "] cannot be found for metadata." );
}
}
- private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addFailure( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addFailure( artifact, ROLE_HINT, problem, reason );
+ database.addFailure( artifact, ROLE_HINT, problem, reason );
}
private static void adjustDistributionArtifactHandler( Artifact artifact )
}
}
- private Model readArtifactModel( File file, Artifact artifact, ReportingDatabase reporter )
+ private Model readArtifactModel( File file, Artifact artifact )
{
Model model = null;
jar = new JarFile( file );
//Get the entry and its input stream.
- JarEntry entry = jar.getJarEntry(
- "META-INF/maven/" + artifact.getGroupId() + "/" + artifact.getArtifactId() + "/pom.xml" );
+ JarEntry entry = jar.getJarEntry( "META-INF/maven/" + artifact.getGroupId() + "/"
+ + artifact.getArtifactId() + "/pom.xml" );
// If the entry is not null, extract it.
if ( entry != null )
}
catch ( IOException e )
{
- addWarning( reporter, artifact, "Unable to read artifact to extract model: " + e );
+ addWarning( artifact, "Unable to read artifact to extract model: " + e );
}
catch ( XmlPullParserException e )
{
- addWarning( reporter, artifact, "Unable to parse extracted model: " + e );
+ addWarning( artifact, "Unable to parse extracted model: " + e );
}
finally
{
return model;
}
- private static void addWarning( ReportingDatabase reporter, Artifact artifact, String reason )
+ private void addWarning( Artifact artifact, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addWarning( artifact, ROLE_HINT, null, reason );
+ database.addWarning( artifact, ROLE_HINT, null, reason );
}
private Model readModel( InputStream entryStream )
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
{
String ROLE = MetadataReportProcessor.class.getName();
- void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ReportingDatabase reporter );
+ void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository );
}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
* @plexus.configuration default-value="31536000"
*/
private int maxAge;
+
+ /**
+ * TODO: Must create an 'Old Artifact' database.
+ * TODO: Base this off of an artifact table query instead.
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
- public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter )
+ public void processArtifact( Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( System.currentTimeMillis() - file.lastModified() > maxAge * 1000 )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addNotice( artifact, ROLE_HINT, "old-artifact",
+ database.addNotice( artifact, ROLE_HINT, "old-artifact",
"The artifact is older than the maximum age of " + maxAge + " seconds." );
}
}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.handler.DefaultArtifactHandler;
import org.apache.maven.artifact.repository.ArtifactRepository;
*/
private int maxSnapshots;
- public void processArtifact( final Artifact artifact, Model model, ReportingDatabase reporter )
+ /**
+ * TODO: Must create an 'Old Artifact' database.
+ * TODO: Base this off of an artifact table query instead.
+ * @plexus.requirement
+ */
+ private ArtifactResultsDatabase database;
+
+ public void processArtifact( final Artifact artifact, Model model )
{
ArtifactRepository repository = artifact.getRepository();
if ( !"file".equals( repository.getProtocol() ) )
{
// We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl()
+ + "'. Only file based repositories are supported" );
}
adjustDistributionArtifactHandler( artifact );
catch ( ParseException e )
{
throw new IllegalStateException(
- "Shouldn't match timestamp pattern and not be able to parse it: " + m.group( 2 ) );
+ "Shouldn't match timestamp pattern and not be able to parse it: "
+ + m.group( 2 ) );
}
if ( System.currentTimeMillis() - timestamp > maxAge * 1000 )
{
- addNotice( reporter, artifact, "snapshot-expired-time",
- "The artifact is older than the maximum age of " + maxAge + " seconds." );
+ addNotice( artifact, "snapshot-expired-time", "The artifact is older than the maximum age of "
+ + maxAge + " seconds." );
}
else if ( maxSnapshots > 0 )
{
{
public boolean accept( File file, String string )
{
- return string.startsWith( artifact.getArtifactId() + "-" ) &&
- string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
+ return string.startsWith( artifact.getArtifactId() + "-" )
+ && string.endsWith( "." + artifact.getArtifactHandler().getExtension() );
}
} );
- List/*<Integer>*/ buildNumbers = new ArrayList();
+ List/*<Integer>*/buildNumbers = new ArrayList();
Integer currentBuild = null;
for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); )
{
if ( buildNumbers.contains( currentBuild ) )
{
- addNotice( reporter, artifact, "snapshot-expired-count",
+ addNotice( artifact, "snapshot-expired-count",
"The artifact is older than the maximum number of retained snapshot builds." );
}
}
}
}
- private static void addNotice( ReportingDatabase reporter, Artifact artifact, String problem, String reason )
+ private void addNotice( Artifact artifact, String problem, String reason )
{
// TODO: reason could be an i18n key derived from the processor and the problem ID and the
- reporter.addNotice( artifact, ROLE_HINT, problem, reason );
+ database.addNotice( artifact, ROLE_HINT, problem, reason );
}
private static void adjustDistributionArtifactHandler( Artifact artifact )
+++ /dev/null
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.Reporting;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Reader;
-import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Writer;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Load and store the reports. No synchronization is used, but it is unnecessary as the old object
- * can continue to be used.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
- * @todo support other implementations than XML file
- * @plexus.component
- */
-public class DefaultReportingStore
- extends AbstractLogEnabled
- implements ReportingStore
-{
- /**
- * The cached reports for given repositories.
- */
- private Map/*<String,ReportingDatabase>*/ reports = new HashMap();
-
- public ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
- throws ReportingStoreException
- {
- String key = getKey( repository, reportGroup );
- ReportingDatabase database = (ReportingDatabase) reports.get( key );
-
- if ( database == null )
- {
- ReportingXpp3Reader reader = new ReportingXpp3Reader();
-
- File file = getReportFilename( repository, reportGroup );
-
- FileReader fileReader = null;
- try
- {
- fileReader = new FileReader( file );
- }
- catch ( FileNotFoundException e )
- {
- database = new ReportingDatabase( reportGroup, repository );
- }
-
- if ( database == null )
- {
- getLogger().info( "Reading report database from " + file );
- try
- {
- Reporting reporting = reader.read( fileReader, false );
- database = new ReportingDatabase( reportGroup, reporting, repository );
- }
- catch ( IOException e )
- {
- throw new ReportingStoreException( e.getMessage(), e );
- }
- catch ( XmlPullParserException e )
- {
- throw new ReportingStoreException( e.getMessage(), e );
- }
- finally
- {
- IOUtils.closeQuietly( fileReader );
- }
- }
-
- reports.put( key, database );
- }
- return database;
- }
-
- private static String getKey( ArtifactRepository repository, ReportGroup reportGroup )
- {
- return repository.getId() + "/" + reportGroup.getFilename();
- }
-
- private static File getReportFilename( ArtifactRepository repository, ReportGroup reportGroup )
- {
- return new File( repository.getBasedir(), ".reports/" + reportGroup.getFilename() );
- }
-
- public void storeReports( ReportingDatabase database, ArtifactRepository repository )
- throws ReportingStoreException
- {
- database.updateTimings();
-
- ReportingXpp3Writer writer = new ReportingXpp3Writer();
-
- File file = getReportFilename( repository, database.getReportGroup() );
- getLogger().info( "Writing reports to " + file );
- FileWriter fileWriter = null;
- try
- {
- file.getParentFile().mkdirs();
-
- fileWriter = new FileWriter( file );
- writer.write( fileWriter, database.getReporting() );
- }
- catch ( IOException e )
- {
- throw new ReportingStoreException( e.getMessage(), e );
- }
- finally
- {
- IOUtils.closeQuietly( fileWriter );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- * A component for loading the reporting database into the model.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo this is something that could possibly be generalised into Modello.
- */
-public interface ReportingStore
-{
- /**
- * The Plexus role for the component.
- */
- String ROLE = ReportingStore.class.getName();
-
- /**
- * Get the reports from the store. A cached version may be used.
- *
- * @param repository the repository to load the reports for
- * @param reportGroup the report group to get the report for
- * @return the reporting database
- * @throws ReportingStoreException if there was a problem reading the store
- */
- ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup )
- throws ReportingStoreException;
-
- /**
- * Save the reporting to the store.
- *
- * @param database the reports to store
- * @param repository the repositorry to store the reports in
- * @throws ReportingStoreException if there was a problem writing the store
- */
- void storeReports( ReportingDatabase database, ArtifactRepository repository )
- throws ReportingStoreException;
-
-}
+++ /dev/null
-package org.apache.maven.archiva.reporting.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * Exception occurring using the reporting store.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ReportingStoreException
- extends Exception
-{
- public ReportingStoreException( String message )
- {
- super( message );
- }
-
- public ReportingStoreException( String message, Throwable e )
- {
- super( message, e );
- }
-}
+<?xml version="1.0" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
</default>
</defaults>
<classes>
- <class rootElement="true" xml.tagName="reporting">
+ <class rootElement="true" xml.tagName="reporting" stash.storable="false">
<name>Reporting</name>
<version>1.0.0</version>
<fields>
<multiplicity>*</multiplicity>
</association>
</field>
- <field xml.attribute="true">
- <name>lastModified</name>
- <version>1.0.0</version>
- <type>long</type>
- </field>
- <field xml.attribute="true">
- <name>executionTime</name>
- <version>1.0.0</version>
- <type>long</type>
- </field>
</fields>
</class>
- <class>
+ <class stash.storable="true"
+ jpox.use-identifiers-as-primary-key="true"
+ jpox.identity-type="application"
+ jpox.identity-class="ArtifactResultsKey">
<name>ArtifactResults</name>
<version>1.0.0</version>
<fields>
- <field>
- <name>failures</name>
- <version>1.0.0</version>
- <association>
- <type>Result</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>warnings</name>
- <version>1.0.0</version>
- <association>
- <type>Result</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field>
- <name>notices</name>
- <version>1.0.0</version>
- <association>
- <type>Result</type>
- <multiplicity>*</multiplicity>
- </association>
- </field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>groupId</name>
<identity>true</identity>
<version>1.0.0</version>
The group ID of the artifact in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>artifactId</name>
<version>1.0.0</version>
<identity>true</identity>
The artifact ID of the artifact in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>version</name>
<version>1.0.0</version>
<identity>true</identity>
The version of the artifact in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>type</name>
<version>1.0.0</version>
<type>String</type>
The type of the artifact in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.nullValue="none"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>classifier</name>
<version>1.0.0</version>
<type>String</type>
The classifier of the artifact in the result.
</description>
</field>
- </fields>
- </class>
- <class>
- <name>MetadataResults</name>
- <version>1.0.0</version>
- <fields>
<field>
<name>failures</name>
<version>1.0.0</version>
- <association>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
<type>Result</type>
<multiplicity>*</multiplicity>
</association>
<field>
<name>warnings</name>
<version>1.0.0</version>
- <association>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
<type>Result</type>
<multiplicity>*</multiplicity>
</association>
<field>
<name>notices</name>
<version>1.0.0</version>
- <association>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
<type>Result</type>
<multiplicity>*</multiplicity>
</association>
</field>
- <field xml.attribute="true">
+ </fields>
+ </class>
+ <class stash.storable="true"
+ jpox.use-identifiers-as-primary-key="true"
+ jpox.identity-type="application"
+ jpox.identity-class="MetadataResultsKey">
+ <name>MetadataResults</name>
+ <version>1.0.0</version>
+ <fields>
+ <field xml.attribute="true"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>groupId</name>
<version>1.0.0</version>
<type>String</type>
The group ID of the metadata in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.nullValue="none"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>artifactId</name>
<version>1.0.0</version>
<type>String</type>
The artifact ID of the metadata in the result.
</description>
</field>
- <field xml.attribute="true">
+ <field xml.attribute="true"
+ jpox.nullValue="none"
+ jpox.primary-key="true"
+ jpox.value-strategy="off"
+ jpox.persistence-modifier="persistent">
<name>version</name>
<version>1.0.0</version>
<type>String</type>
The version of the metadata in the result.
</description>
</field>
+ <field>
+ <name>failures</name>
+ <version>1.0.0</version>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field>
+ <name>warnings</name>
+ <version>1.0.0</version>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
+ <field>
+ <name>notices</name>
+ <version>1.0.0</version>
+ <association stash.part="true"
+ jpox.join="true"
+ java.init="field"
+ jpox.dependent="true"
+ java.generate-break="false"
+ java.generate-create="false">
+ <type>Result</type>
+ <multiplicity>*</multiplicity>
+ </association>
+ </field>
<field xml.attribute="true">
<name>lastModified</name>
<version>1.0.0</version>
</field>
</fields>
</class>
- <class>
+ <class stash.storable="true">
<name>Result</name>
<version>1.0.0</version>
<fields>
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.jpox.SchemaTool;
import java.io.File;
+import java.net.URL;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.jdo.PersistenceManager;
+import javax.jdo.PersistenceManagerFactory;
/**
*
{
super.setUp();
+ setupJdoFactory();
+
File repositoryDirectory = getTestFile( "src/test/repository" );
factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
}
- protected Artifact createArtifactFromRepository( File repository, String groupId, String artifactId,
- String version )
+ protected void setupJdoFactory()
+ throws Exception
+ {
+ DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
+
+ jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" ); //$NON-NLS-1$
+
+ jdoFactory.setDriverName( "org.hsqldb.jdbcDriver" ); //$NON-NLS-1$
+
+ jdoFactory.setUrl( "jdbc:hsqldb:mem:" + getName() ); //$NON-NLS-1$
+
+ jdoFactory.setUserName( "sa" ); //$NON-NLS-1$
+
+ jdoFactory.setPassword( "" ); //$NON-NLS-1$
+
+ jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_UNCOMMITTED" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+ jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_UNCOMMITTED" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+ jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" ); //$NON-NLS-1$ //$NON-NLS-2$
+
+ jdoFactory.setProperty( "javax.jdo.PersistenceManagerFactoryClass", "org.jpox.PersistenceManagerFactoryImpl" );
+
+ Properties properties = jdoFactory.getProperties();
+
+ for ( Iterator it = properties.entrySet().iterator(); it.hasNext(); )
+ {
+ Map.Entry entry = (Map.Entry) it.next();
+
+ System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
+ }
+
+ SchemaTool.createSchemaTables( new URL[] { getClass()
+ .getResource( "/org/apache/maven/archiva/reporting/model/package.jdo" ) }, new URL[] {}, null, false, null ); //$NON-NLS-1$
+
+ PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
+
+ assertNotNull( pmf );
+
+ PersistenceManager pm = pmf.getPersistenceManager();
+
+ pm.close();
+ }
+
+ protected Artifact createArtifactFromRepository( File repository, String groupId, String artifactId, String version )
throws Exception
{
Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, "jar" );
- artifact.setRepository(
- factory.createArtifactRepository( "repository", repository.toURL().toString(), layout, null, null ) );
+ artifact.setRepository( factory.createArtifactRepository( "repository", repository.toURL().toString(), layout,
+ null, null ) );
artifact.isSnapshot();
protected Artifact createArtifactWithClassifier( String groupId, String artifactId, String version, String type,
String classifier )
{
- Artifact artifact =
- artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
+ Artifact artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifier );
artifact.setRepository( repository );
return artifact;
}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - top level suite aggregating the database, processor and reporter
+ * package suites. Used to aid in IDE based development.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.reporting.*" );
+        //$JUnit-BEGIN$
+        suite.addTest( org.apache.maven.archiva.reporting.database.AllTests.suite() );
+        suite.addTest( org.apache.maven.archiva.reporting.processor.AllTests.suite() );
+        suite.addTest( org.apache.maven.archiva.reporting.reporter.AllTests.suite() );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - suite for the org.apache.maven.archiva.reporting.database package.
+ * Used to aid in IDE based development.
+ *
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.reporting.database" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( ArtifactResultsDatabaseTest.class );
+        suite.addTestSuite( MetadataResultsDatabaseTest.class );
+        suite.addTestSuite( ReportingDatabaseTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.model.ArtifactResults;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+import java.util.List;
+
+/**
+ * ArtifactResultsDatabaseTest - tests the add (notice/warning/failure) and find
+ * operations of {@link ArtifactResultsDatabase}. Each add test also verifies
+ * that recording the identical result twice does not create a duplicate entry.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ArtifactResultsDatabaseTest
+    extends AbstractRepositoryReportsTestCase
+{
+    /** Fixed artifact used as the subject of the add/get tests. */
+    private Artifact artifact;
+
+    /** Common values recorded with every notice, warning and failure. */
+    private String processor, problem, reason;
+
+    /** Component under test, looked up from the container in {@link #setUp()}. */
+    private ArtifactResultsDatabase database;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        // Obtain the database component from the container.
+        database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+
+        artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope", "type",
+                                        "classifier", null );
+        processor = "processor";
+        problem = "problem";
+        reason = "reason";
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        // Return the component to the container before base class cleanup.
+        release( database );
+
+        super.tearDown();
+    }
+
+    public void testAddNoticeArtifactStringStringString()
+    {
+        database.addNotice( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, artifactResults.getNotices().size() );
+
+        // Adding the identical notice again must not create a duplicate.
+        database.addNotice( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, artifactResults.getNotices().size() );
+    }
+
+    public void testAddWarningArtifactStringStringString()
+    {
+        database.addWarning( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, artifactResults.getWarnings().size() );
+
+        // Adding the identical warning again must not create a duplicate.
+        database.addWarning( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, artifactResults.getWarnings().size() );
+    }
+
+    public void testAddFailureArtifactStringStringString()
+    {
+        database.addFailure( artifact, processor, problem, reason );
+        ArtifactResults artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, artifactResults.getFailures().size() );
+
+        // Adding the identical failure again must not create a duplicate.
+        database.addFailure( artifact, processor, problem, reason );
+        artifactResults = database.getArtifactResults( artifact );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, artifactResults.getFailures().size() );
+    }
+
+    public void testFindArtifactResults()
+    {
+        String groupId = "org.test.group";
+
+        // One unrelated artifact plus a main jar and two classified artifacts
+        // sharing groupId:foo:1.0 — the find below should match the latter three.
+        Artifact bar = createArtifact( "org.bar", "bar", "2.0" );
+        Artifact foo = createArtifact( groupId, "foo", "1.0" );
+        Artifact fooSources = createArtifactWithClassifier( groupId, "foo", "1.0", "jar", "sources" );
+        Artifact fooJavadoc = createArtifactWithClassifier( groupId, "foo", "1.0", "jar", "javadoc" );
+
+        database.addFailure( bar, processor, problem, "A reason that should not be found." );
+
+        String testprocessor = "test-processor";
+        String testproblem = "test-problem";
+
+        database.addFailure( foo, testprocessor, testproblem, "Test Reason on main jar." );
+        database.addFailure( foo, testprocessor, testproblem, "Someone mistook this for an actual reason." );
+        database.addWarning( foo, testprocessor, testproblem, "Congrats you have a test reason." );
+
+        database.addFailure( fooSources, testprocessor, testproblem, "Sources do not seem to match classes." );
+        database.addWarning( fooJavadoc, testprocessor, testproblem, "Javadoc content makes no sense." );
+
+        ArtifactResults artifactResults = database.getArtifactResults( foo );
+
+        // 4 failures total across all artifacts, 2 of them on the main foo jar.
+        assertEquals( 4, database.getNumFailures() );
+        assertEquals( 2, artifactResults.getFailures().size() );
+
+        List hits = database.findArtifactResults( groupId, "foo", "1.0" );
+        assertNotNull( hits );
+
+        // NOTE(review): debugging output left commented out below; consider removing.
+//        for ( Iterator it = hits.iterator(); it.hasNext(); )
+//        {
+//            ArtifactResults result = (ArtifactResults) it.next();
+//            System.out.println( " result: " + result.getGroupId() + ":" + result.getArtifactId() + ":"
+//                + result.getVersion() + ":" + result.getClassifier() + ":" + result.getType() );
+//
+//            for ( Iterator itmsgs = result.getFailures().iterator(); itmsgs.hasNext(); )
+//            {
+//                Result res = (Result) itmsgs.next();
+//                String msg = (String) res.getReason();
+//                System.out.println( "  failure: " + msg );
+//            }
+//
+//            for ( Iterator itmsgs = result.getWarnings().iterator(); itmsgs.hasNext(); )
+//            {
+//                Result res = (Result) itmsgs.next();
+//                String msg = (String) res.getReason();
+//                System.out.println( "  warning: " + msg );
+//            }
+//
+//            for ( Iterator itmsgs = result.getNotices().iterator(); itmsgs.hasNext(); )
+//            {
+//                Result res = (Result) itmsgs.next();
+//                String msg = (String) res.getReason();
+//                System.out.println( "  notice: " + msg );
+//            }
+//        }
+
+        assertEquals( "Should find 3 artifacts", 3, hits.size() ); // 3 artifacts
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * MetadataResultsDatabaseTest - tests the add (notice/warning/failure)
+ * operations of {@link MetadataResultsDatabase}. Each test also verifies that
+ * recording the identical result twice does not create a duplicate entry.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class MetadataResultsDatabaseTest
+    extends AbstractRepositoryReportsTestCase
+{
+    /** Component under test, looked up from the container in {@link #setUp()}. */
+    private MetadataResultsDatabase database;
+
+    /** Fixed repository metadata used as the subject of the add/get tests. */
+    private RepositoryMetadata metadata;
+
+    /** Common values recorded with every notice, warning and failure. */
+    private String processor, problem, reason;
+
+    protected void setUp()
+        throws Exception
+    {
+        super.setUp();
+
+        // Obtain the database component from the container.
+        database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
+
+        Artifact artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope",
+                                                 "type", "classifier", null );
+        metadata = new ArtifactRepositoryMetadata( artifact );
+
+        processor = "processor";
+        problem = "problem";
+        reason = "reason";
+    }
+
+    protected void tearDown()
+        throws Exception
+    {
+        // Return the component to the container before base class cleanup.
+        release( database );
+
+        super.tearDown();
+    }
+
+    public void testAddNoticeRepositoryMetadataStringStringString()
+    {
+        database.addNotice( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, metadataResults.getNotices().size() );
+
+        // Adding the identical notice again must not create a duplicate.
+        database.addNotice( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumNotices() );
+        assertEquals( 1, metadataResults.getNotices().size() );
+    }
+
+    public void testAddWarningRepositoryMetadataStringStringString()
+    {
+        database.addWarning( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, metadataResults.getWarnings().size() );
+
+        // Adding the identical warning again must not create a duplicate.
+        database.addWarning( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumWarnings() );
+        assertEquals( 1, metadataResults.getWarnings().size() );
+    }
+
+    public void testAddFailureRepositoryMetadataStringStringString()
+    {
+        database.addFailure( metadata, processor, problem, reason );
+        MetadataResults metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, metadataResults.getFailures().size() );
+
+        // Adding the identical failure again must not create a duplicate.
+        database.addFailure( metadata, processor, problem, reason );
+        metadataResults = database.getMetadataResults( metadata );
+
+        assertEquals( 1, database.getNumFailures() );
+        assertEquals( 1, metadataResults.getFailures().size() );
+    }
+}
* under the License.
*/
-import junit.framework.TestCase;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
/**
* Test for {@link ReportingDatabase}.
* @version $Id$
*/
public class ReportingDatabaseTest
- extends TestCase
+ extends AbstractRepositoryReportsTestCase
{
- private Artifact artifact;
-
- private String processor, problem, reason;
-
- private ReportingDatabase reportingDatabase;
-
- private RepositoryMetadata metadata;
+ private ReportingDatabase database;
protected void setUp()
throws Exception
{
super.setUp();
- artifact = new DefaultArtifact( "group", "artifact", VersionRange.createFromVersion( "1.0" ), "scope", "type",
- "classifier", null );
- processor = "processor";
- problem = "problem";
- reason = "reason";
- reportingDatabase = new ReportingDatabase( null );
-
- metadata = new ArtifactRepositoryMetadata( artifact );
- }
-
- public void testAddNoticeArtifactStringStringString()
- {
- reportingDatabase.addNotice( artifact, processor, problem, reason );
- ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumNotices() );
- assertEquals( 1, artifactResults.getNotices().size() );
-
- reportingDatabase.addNotice( artifact, processor, problem, reason );
- artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumNotices() );
- assertEquals( 1, artifactResults.getNotices().size() );
+ database = (ReportingDatabase) lookup( ReportingDatabase.ROLE );
}
- public void testAddWarningArtifactStringStringString()
- {
- reportingDatabase.addWarning( artifact, processor, problem, reason );
- ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- assertEquals( 1, artifactResults.getWarnings().size() );
-
- reportingDatabase.addWarning( artifact, processor, problem, reason );
- artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- assertEquals( 1, artifactResults.getWarnings().size() );
- }
-
- public void testAddFailureArtifactStringStringString()
- {
- reportingDatabase.addFailure( artifact, processor, problem, reason );
- ArtifactResults artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 1, artifactResults.getFailures().size() );
-
- reportingDatabase.addFailure( artifact, processor, problem, reason );
- artifactResults = reportingDatabase.getArtifactResults( artifact );
-
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 1, artifactResults.getFailures().size() );
- }
-
- public void testAddNoticeRepositoryMetadataStringStringString()
- {
- reportingDatabase.addNotice( metadata, processor, problem, reason );
- MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumNotices() );
- assertEquals( 1, metadataResults.getNotices().size() );
-
- reportingDatabase.addNotice( metadata, processor, problem, reason );
- metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumNotices() );
- assertEquals( 1, metadataResults.getNotices().size() );
- }
-
- public void testAddWarningRepositoryMetadataStringStringString()
+ protected void tearDown()
+ throws Exception
{
- reportingDatabase.addWarning( metadata, processor, problem, reason );
- MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- assertEquals( 1, metadataResults.getWarnings().size() );
-
- reportingDatabase.addWarning( metadata, processor, problem, reason );
- metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumWarnings() );
- assertEquals( 1, metadataResults.getWarnings().size() );
+ release( database );
+ super.tearDown();
}
- public void testAddFailureRepositoryMetadataStringStringString()
+ public void testLookup()
{
- reportingDatabase.addFailure( metadata, processor, problem, reason );
- MetadataResults metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 1, metadataResults.getFailures().size() );
-
- reportingDatabase.addFailure( metadata, processor, problem, reason );
- metadataResults = reportingDatabase.getMetadataResults( metadata, System.currentTimeMillis() );
-
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 1, metadataResults.getFailures().size() );
+ assertNotNull( "database should not be null.", database );
+ assertNotNull( "database.artifactDatabase should not be null.", database.getArtifactDatabase() );
+ assertNotNull( "database.metadataDatabase should not be null.", database.getMetadataDatabase() );
}
}
--- /dev/null
+package org.apache.maven.archiva.reporting.processor;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - suite for the org.apache.maven.archiva.reporting.processor package.
+ * Used to aid in IDE based development.
+ *
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.reporting.processor" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( LocationArtifactReportProcessorTest.class );
+        suite.addTestSuite( DuplicateArtifactFileReportProcessorTest.class );
+        suite.addTestSuite( OldSnapshotArtifactReportProcessorTest.class );
+        suite.addTestSuite( DependencyArtifactReportProcessorTest.class );
+        suite.addTestSuite( OldArtifactReportProcessorTest.class );
+        suite.addTestSuite( InvalidPomArtifactReportProcessorTest.class );
+        suite.addTestSuite( BadMetadataReportProcessorTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
* under the License.
*/
+import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
import org.apache.maven.archiva.reporting.model.MetadataResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import java.util.Iterator;
+/**
+ * BadMetadataReportProcessorTest
+ *
+ * @version $Id$
+ */
public class BadMetadataReportProcessorTest
extends AbstractRepositoryReportsTestCase
{
private MetadataReportProcessor badMetadataReportProcessor;
- private ReportingDatabase reportingDatabase;
+ private MetadataResultsDatabase database;
protected void setUp()
throws Exception
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
- badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
+ database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
+ badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "bad-metadata" );
}
+ protected void tearDown()
+ throws Exception
+ {
+ release( artifactFactory );
+ release( badMetadataReportProcessor );
+ super.tearDown();
+ }
+
public void testMetadataMissingLastUpdated()
{
Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, null );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
if ( alpha1First )
{
assertEquals( "check reason",
- "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.", result
+ .getReason() );
}
else
{
assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+ .getReason() );
}
result = (Result) failures.next();
if ( !alpha1First )
{
assertEquals( "check reason",
- "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-1 found in the repository but missing in the metadata.", result
+ .getReason() );
}
else
{
assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+ .getReason() );
}
assertFalse( "check no more failures", failures.hasNext() );
}
public void testSnapshotMetadataMissingVersioning()
{
- Artifact artifact =
- artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+ "1.0-alpha-1-SNAPSHOT", "type" );
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
- "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
- result.getReason() );
+ "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertMetadata( metadata, results );
// TODO: should be more robust
assertEquals( "check reason",
- "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
- result.getReason() );
+ "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.", result
+ .getReason() );
assertTrue( "check there is a 2nd failure", failures.hasNext() );
result = (Result) failures.next();
// TODO: should be more robust
assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "missing-plugin", "default3" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository",
- result.getReason() );
+ assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository", result.getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason",
- "Plugin snapshot-artifact is present in the repository but " + "missing in the metadata.",
- result.getReason() );
+ assertEquals( "check reason", "Plugin snapshot-artifact is present in the repository but "
+ + "missing in the metadata.", result.getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( null, "default3" ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "", "default4" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertTrue( "check there is a failure", failures.hasNext() );
Result result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3",
- result.getReason() );
+ assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default3", result
+ .getReason() );
assertTrue( "check there is a 2nd failure", failures.hasNext() );
result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4",
- result.getReason() );
+ assertEquals( "check reason", "Missing or empty artifactId in group metadata for plugin default4", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
assertTrue( "check there is a 2nd failure", failures.hasNext() );
result = (Result) failures.next();
// TODO: should be more robust
- assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.",
- result.getReason() );
+ assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default" ) );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
public void testValidSnapshotMetadata()
{
- Artifact artifact =
- artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+ "1.0-alpha-1-SNAPSHOT", "type" );
Snapshot snapshot = new Snapshot();
snapshot.setBuildNumber( 1 );
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertFalse( "check there are no failures", failures.hasNext() );
}
public void testInvalidSnapshotMetadata()
{
- Artifact artifact =
- artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact",
+ "1.0-alpha-1-SNAPSHOT", "type" );
Snapshot snapshot = new Snapshot();
snapshot.setBuildNumber( 2 );
RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
- badMetadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
+ badMetadataReportProcessor.processMetadata( metadata, repository );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
Result result = (Result) failures.next();
assertMetadata( metadata, results );
// TODO: should be more robust
- assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.",
- result.getReason() );
+ assertEquals( "check reason", "Snapshot artifact 1.0-alpha-1-20050611.202024-2 does not exist.", result
+ .getReason() );
assertFalse( "check no more failures", failures.hasNext() );
}
private static void assertMetadata( RepositoryMetadata metadata, MetadataResults results )
{
- assertEquals( "check metadata", metadata.getGroupId(), results.getGroupId() );
- assertEquals( "check metadata", metadata.getArtifactId(), results.getArtifactId() );
- assertEquals( "check metadata", metadata.getBaseVersion(), results.getVersion() );
+ /* The funky StringUtils.defaultString() is used because of database constraints.
+ * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version.
+ * This also means that none of those fields may be null. However, that doesn't eliminate the
+ * ability to have an empty string in place of a null.
+ */
+ assertEquals( "check metadata", StringUtils.defaultString( metadata.getGroupId() ), results.getGroupId() );
+ assertEquals( "check metadata", StringUtils.defaultString( metadata.getArtifactId() ), results.getArtifactId() );
+ assertEquals( "check metadata", StringUtils.defaultString( metadata.getBaseVersion() ), results.getVersion() );
}
private Plugin createMetadataPlugin( String artifactId, String prefix )
*/
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
private static final String VALID_VERSION = "1.0-alpha-1";
- private ReportingDatabase reportingDatabase;
+ private ArtifactResultsDatabase database;
private Model model;
{
super.setUp();
model = new Model();
- processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
-
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+ processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "dependency" );
}
public void testArtifactFoundButNoDirectDependencies()
{
Artifact artifact = createValidArtifact();
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
private Artifact createValidArtifact()
{
- Artifact projectArtifact =
- artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID, VALID_VERSION );
+ Artifact projectArtifact = artifactFactory.createProjectArtifact( VALID_GROUP_ID, VALID_ARTIFACT_ID,
+ VALID_VERSION );
projectArtifact.setRepository( repository );
return projectArtifact;
}
{
Artifact artifact = artifactFactory.createProjectArtifact( INVALID, INVALID, INVALID );
artifact.setRepository( repository );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createValidDependency();
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
private Dependency createValidDependency()
Dependency dependency = createValidDependency();
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
public void testValidArtifactWithValidMultipleDependencies()
model.addDependency( dependency );
Artifact artifact = createValidArtifact();
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
public void testValidArtifactWithAnInvalidDependency()
model.addDependency( createDependency( INVALID, INVALID, INVALID ) );
Artifact artifact = createValidArtifact();
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Result result = (Result) failures.next();
- assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ),
- result.getReason() );
+ assertEquals( getDependencyNotFoundMessage( createDependency( INVALID, INVALID, INVALID ) ), result.getReason() );
}
public void testValidArtifactWithInvalidDependencyGroupId()
Dependency dependency = createDependency( INVALID, VALID_ARTIFACT_ID, VALID_VERSION );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createDependency( VALID_GROUP_ID, INVALID, VALID_VERSION );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, INVALID );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[" );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, "[1.0,)" );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
}
public void testValidArtifactWithMissingDependencyVersion()
Dependency dependency = createDependency( VALID_GROUP_ID, VALID_ARTIFACT_ID, null );
model.addDependency( dependency );
- processor.processArtifact( artifact, model, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( 0, reportingDatabase.getNumNotices() );
+ processor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getIterator();
ArtifactResults results = (ArtifactResults) failures.next();
assertFalse( failures.hasNext() );
failures = results.getFailures().iterator();
import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory;
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.model.Model;
File indexDirectory;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
protected void setUp()
throws Exception
FileUtils.deleteDirectory( indexDirectory );
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+
artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "jar" );
- System.out.println( "artifact = " + artifact );
+
model = new Model();
RepositoryArtifactIndexFactory factory =
index.indexRecords( Collections.singletonList( recordFactory.createRecord( artifact ) ) );
processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "duplicate" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportDatabase = new ReportingDatabase( reportGroup );
}
public void testNullArtifactFile()
{
artifact.setFile( null );
- processor.processArtifact( artifact, model, reportDatabase );
+ processor.processArtifact( artifact, model );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check warnings", 1, reportDatabase.getNumWarnings() );
- assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check warnings", 1, database.getNumWarnings() );
+ assertEquals( "Check no failures", 0, database.getNumFailures() );
}
public void testSuccessOnAlreadyIndexedArtifact()
throws Exception
{
- processor.processArtifact( artifact, model, reportDatabase );
+ processor.processArtifact( artifact, model );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check warnings", 0, database.getNumWarnings() );
+ assertEquals( "Check no failures", 0, database.getNumFailures() );
}
public void testSuccessOnDifferentGroupId()
throws Exception
{
artifact.setGroupId( "different.groupId" );
- processor.processArtifact( artifact, model, reportDatabase );
+ processor.processArtifact( artifact, model );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check warnings", 0, database.getNumWarnings() );
+ assertEquals( "Check no failures", 0, database.getNumFailures() );
}
public void testSuccessOnNewArtifact()
{
Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
- processor.processArtifact( newArtifact, model, reportDatabase );
+ processor.processArtifact( newArtifact, model );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no failures", 0, reportDatabase.getNumFailures() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check warnings", 0, database.getNumWarnings() );
+ assertEquals( "Check no failures", 0, database.getNumFailures() );
}
public void testFailure()
artifact.getVersion(), artifact.getType() );
duplicate.setFile( artifact.getFile() );
- processor.processArtifact( duplicate, model, reportDatabase );
+ processor.processArtifact( duplicate, model );
- assertEquals( "Check warnings", 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
- assertEquals( "Check no failures", 1, reportDatabase.getNumFailures() );
+ assertEquals( "Check warnings", 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
+ assertEquals( "Check no failures", 1, database.getNumFailures() );
}
private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
*/
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
/**
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
public void setUp()
throws Exception
{
super.setUp();
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "invalid-pom" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportDatabase = new ReportingDatabase( reportGroup );
}
/**
{
Artifact artifact = createArtifact( "org.apache.maven", "artifactId", "1.0-alpha-3", "pom" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, database.getNumFailures() );
}
{
Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
{
Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "jar" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
}
*/
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
public void setUp()
throws Exception
{
super.setUp();
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "artifact-location" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportDatabase = new ReportingDatabase( reportGroup );
}
/**
{
Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( pomArtifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( pomArtifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
/**
Artifact pomArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-2", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
+ artifactReportProcessor.processArtifact( artifact, model );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ assertEquals( 1, database.getNumFailures() );
}
/**
Artifact pomArtifact = createArtifact( "org.apache.maven", "maven-archiver", "2.0", "pom" );
Model model = readPom( repository.pathOf( pomArtifact ) );
- artifactReportProcessor.processArtifact( artifact, model, reportDatabase );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, model );
+ assertEquals( 1, database.getNumFailures() );
}
private Model readPom( String path )
{
Artifact artifact = createArtifact( "org.apache.maven", "maven-monitor", "2.1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, database.getNumFailures() );
}
/**
{
Artifact artifact = createArtifact( "org.apache.maven", "maven-project", "2.1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 1, reportDatabase.getNumFailures() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, database.getNumFailures() );
}
}
*/
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
public void setUp()
throws Exception
{
super.setUp();
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-artifact" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
- reportDatabase = new ReportingDatabase( reportGroup );
}
public void testOldArtifact()
{
Artifact artifact = createArtifact( "org.apache.maven", "maven-model", "2.0" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
- ArtifactResults results = (ArtifactResults) reportDatabase.getArtifactIterator().next();
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check notices", 1, database.getNumNotices() );
+ ArtifactResults results = (ArtifactResults) database.getIterator().next();
assertEquals( artifact.getArtifactId(), results.getArtifactId() );
assertEquals( artifact.getGroupId(), results.getGroupId() );
assertEquals( artifact.getVersion(), results.getVersion() );
Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
public void testMissingArtifact()
try
{
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+ artifactReportProcessor.processArtifact( artifact, null );
fail( "Should not have passed" );
}
catch ( IllegalStateException e )
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportDatabase;
+ private ArtifactResultsDatabase database;
private File tempRepository;
throws Exception
{
super.setUp();
- artifactReportProcessor =
- (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "old-snapshot-artifact" );
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+ artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE,
+ "old-snapshot-artifact" );
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "old-artifact" );
- reportDatabase = new ReportingDatabase( reportGroup );
tempRepository = getTestFile( "target/test-repository" );
FileUtils.deleteDirectory( tempRepository );
}
{
Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-20050611.202024-1", "pom" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check notices", 1, reportDatabase.getNumNotices() );
- Iterator artifactIterator = reportDatabase.getArtifactIterator();
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check notices", 1, database.getNumNotices() );
+ Iterator artifactIterator = database.getIterator();
assertArtifactResults( artifactIterator, artifact );
}
{
Artifact artifact = createArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "pom" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
public void testNonSnapshotArtifact()
{
Artifact artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
public void testNewSnapshotArtifact()
String date = new SimpleDateFormat( "yyyyMMdd.HHmmss" ).format( new Date() );
FileUtils.writeStringToFile( new File( dir, "artifactId-1.0-alpha-1-" + date + "-1.jar" ), "foo", null );
- Artifact artifact =
- createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date + "-1" );
+ Artifact artifact = createArtifactFromRepository( repository, "groupId", "artifactId", "1.0-alpha-1-" + date
+ + "-1" );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check no notices", 0, reportDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check no notices", 0, database.getNumNotices() );
}
public void testTooManySnapshotArtifact()
for ( int i = 1; i <= 5; i++ )
{
- Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId",
- "1.0-alpha-1-" + date + "-" + i );
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+ Artifact artifact = createArtifactFromRepository( tempRepository, "groupId", "artifactId", "1.0-alpha-1-"
+ + date + "-" + i );
+ artifactReportProcessor.processArtifact( artifact, null );
}
- assertEquals( 0, reportDatabase.getNumFailures() );
- assertEquals( 0, reportDatabase.getNumWarnings() );
- assertEquals( "Check notices", 3, reportDatabase.getNumNotices() );
- Iterator artifactIterator = reportDatabase.getArtifactIterator();
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "Check notices", 3, database.getNumNotices() );
+ Iterator artifactIterator = database.getIterator();
for ( int i = 1; i <= 3; i++ )
{
String version = "1.0-alpha-1-" + date + "-" + i;
try
{
- artifactReportProcessor.processArtifact( artifact, null, reportDatabase );
+ artifactReportProcessor.processArtifact( artifact, null );
fail( "Should not have passed" );
}
catch ( IllegalStateException e )
--- /dev/null
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+/**
+ * AllTests - aggregates this package's reporter test cases into a single
+ * JUnit 3 suite so they can be run as one unit.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class AllTests
+{
+
+    /**
+     * @return a suite containing the reporter tests listed between the
+     *         $JUnit-BEGIN$ / $JUnit-END$ markers (the markers are kept so
+     *         IDE tooling can regenerate the list automatically).
+     */
+    public static Test suite()
+    {
+        TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.reporting.reporter" );
+        //$JUnit-BEGIN$
+        suite.addTestSuite( DefaultArtifactReporterTest.class );
+        suite.addTestSuite( ChecksumMetadataReporterTest.class );
+        suite.addTestSuite( ChecksumArtifactReporterTest.class );
+        //$JUnit-END$
+        return suite;
+    }
+
+}
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
-import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
import org.codehaus.plexus.digest.DigesterException;
-import java.io.File;
import java.io.IOException;
-import java.util.Iterator;
/**
* This class tests the ChecksumArtifactReportProcessor.
{
private ArtifactReportProcessor artifactReportProcessor;
- private ReportingDatabase reportingDatabase;
-
- private MetadataReportProcessor metadataReportProcessor;
+ private ArtifactResultsDatabase database;
public void setUp()
throws Exception
{
super.setUp();
artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
- metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
+ database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
}
/**
Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
- artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
- assertEquals( 0, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 0, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
}
/**
String s1 = "1.0";
Artifact artifact = createArtifact( "checksumTest", s, s1 );
- artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
- assertEquals( 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
- }
-
- /**
- * Test the valid checksum of a metadata file.
- * The reportingDatabase should report 2 success validation.
- */
- public void testChecksumMetadataReporterSuccess()
- throws DigesterException, IOException
- {
- createMetadataFile( "VALID" );
- createMetadataFile( "INVALID" );
-
- Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
- //Version level metadata
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
- //Artifact level metadata
- metadata = new ArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
- //Group level metadata
- metadata = new GroupRepositoryMetadata( "checksumTest" );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
- }
-
- /**
- * Test the corrupted checksum of a metadata file.
- * The reportingDatabase must report 2 failures.
- */
- public void testChecksumMetadataReporterFailure()
- {
- Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
- Iterator failures = reportingDatabase.getMetadataIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- MetadataResults results = (MetadataResults) failures.next();
- failures = results.getFailures().iterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- }
-
- /**
- * Test the conditional when the checksum files of the artifact & metadata do not exist.
- */
- public void testChecksumFilesDoNotExist()
- throws DigesterException, IOException
- {
- createChecksumFile( "VALID" );
- createMetadataFile( "VALID" );
- deleteChecksumFiles( "jar" );
-
- Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
-
- artifactReportProcessor.processArtifact( artifact, null, reportingDatabase );
- assertEquals( 1, reportingDatabase.getNumFailures() );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reportingDatabase );
-
- Iterator failures = reportingDatabase.getMetadataIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- MetadataResults results = (MetadataResults) failures.next();
- failures = results.getFailures().iterator();
- assertTrue( "check there is a failure", failures.hasNext() );
-
- deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+ artifactReportProcessor.processArtifact( artifact, null );
+ assertEquals( 1, database.getNumFailures() );
+ assertEquals( 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
}
}
--- /dev/null
+package org.apache.maven.archiva.reporting.reporter;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
+import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase;
+import org.apache.maven.archiva.reporting.model.MetadataResults;
+import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor;
+import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.codehaus.plexus.digest.DigesterException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * ChecksumMetadataReporterTest - exercises the "checksum-metadata"
+ * {@link MetadataReportProcessor} (and the "checksum"
+ * {@link ArtifactReportProcessor}) against the container-managed results
+ * databases.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class ChecksumMetadataReporterTest
+    extends AbstractChecksumArtifactReporterTestCase
+{
+    // Processor under test for artifact-level checksum validation.
+    private ArtifactReportProcessor artifactReportProcessor;
+
+    // Processor under test for metadata-level checksum validation.
+    private MetadataReportProcessor metadataReportProcessor;
+
+    // Receives metadata processing results (failures/warnings/notices).
+    private MetadataResultsDatabase database;
+
+    // Receives artifact processing results.
+    private ArtifactResultsDatabase artifactsDatabase;
+
+    /**
+     * Looks up the processors and results databases from the Plexus container.
+     */
+    public void setUp()
+        throws Exception
+    {
+        super.setUp();
+        metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
+        artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
+        database = (MetadataResultsDatabase) lookup( MetadataResultsDatabase.ROLE );
+        artifactsDatabase = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );
+    }
+
+    /**
+     * Test the valid checksum of a metadata file.
+     * The reportingDatabase should report 2 successful validations.
+     *
+     * NOTE(review): this test currently makes no assertions - it only
+     * verifies that processing the three metadata levels completes without
+     * throwing.  Consider asserting on the database contents.
+     */
+    public void testChecksumMetadataReporterSuccess()
+        throws DigesterException, IOException
+    {
+        createMetadataFile( "VALID" );
+        createMetadataFile( "INVALID" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        //Version level metadata
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        //Artifact level metadata
+        metadata = new ArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        //Group level metadata
+        metadata = new GroupRepositoryMetadata( "checksumTest" );
+        metadataReportProcessor.processMetadata( metadata, repository );
+    }
+
+    /**
+     * Test the corrupted checksum of a metadata file.
+     * The reportingDatabase must report 2 failures.
+     *
+     * NOTE(review): the assertions below only confirm that at least one
+     * failure was recorded, not two - confirm the intended count.
+     */
+    public void testChecksumMetadataReporterFailure()
+    {
+        Artifact artifact = createArtifact( "checksumTest", "invalidArtifact", "1.0" );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        // Walk the recorded metadata results and verify a failure exists.
+        Iterator failures = database.getIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+    }
+
+    /**
+     * Test the conditional when the checksum files of the artifact &amp; metadata do not exist.
+     * Both the artifact processor and the metadata processor should record a
+     * failure once the .jar checksum files have been deleted.
+     */
+    public void testChecksumFilesDoNotExist()
+        throws DigesterException, IOException
+    {
+        createChecksumFile( "VALID" );
+        createMetadataFile( "VALID" );
+        deleteChecksumFiles( "jar" );
+
+        Artifact artifact = createArtifact( "checksumTest", "validArtifact", "1.0" );
+
+        artifactReportProcessor.processArtifact( artifact, null );
+        assertEquals( 1, artifactsDatabase.getNumFailures() );
+
+        RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+        metadataReportProcessor.processMetadata( metadata, repository );
+
+        Iterator failures = database.getIterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+        MetadataResults results = (MetadataResults) failures.next();
+        failures = results.getFailures().iterator();
+        assertTrue( "check there is a failure", failures.hasNext() );
+
+        // Clean up the fixture files created above.
+        deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+    }
+
+}
* under the License.
*/
+import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.reporting.AbstractRepositoryReportsTestCase;
import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
import org.apache.maven.archiva.reporting.model.ArtifactResults;
import org.apache.maven.archiva.reporting.model.MetadataResults;
import org.apache.maven.archiva.reporting.model.Result;
import java.util.Iterator;
/**
+ * DefaultArtifactReporterTest
*
+ * @version $Id$
*/
public class DefaultArtifactReporterTest
extends AbstractRepositoryReportsTestCase
{
- private ReportingDatabase reportingDatabase;
+ private ReportingDatabase database;
private RepositoryMetadata metadata;
private Artifact artifact;
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ database = (ReportingDatabase) lookup( ReportingDatabase.ROLE );
+
+ ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.addVersion( "1.0-alpha-2" );
+
+ metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+ }
+
public void testEmptyArtifactReporter()
{
- assertEquals( "No failures", 0, reportingDatabase.getNumFailures() );
- assertEquals( "No warnings", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
- assertFalse( "No artifact failures", reportingDatabase.getArtifactIterator().hasNext() );
- assertFalse( "No metadata failures", reportingDatabase.getMetadataIterator().hasNext() );
+ assertEquals( "No failures", 0, database.getNumFailures() );
+ assertEquals( "No warnings", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
+ assertFalse( "No artifact failures", database.getArtifactIterator().hasNext() );
+ assertFalse( "No metadata failures", database.getMetadataIterator().hasNext() );
}
public void testMetadataSingleFailure()
{
- reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
- assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "Single Failure Reason" );
+ assertEquals( "failures count", 1, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getMetadataIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
private void assertMetadata( MetadataResults result )
{
- assertEquals( "check failure cause", metadata.getGroupId(), result.getGroupId() );
- assertEquals( "check failure cause", metadata.getArtifactId(), result.getArtifactId() );
- assertEquals( "check failure cause", metadata.getBaseVersion(), result.getVersion() );
+ /* The funky StringUtils.defaultString() is used because of database constraints.
+ * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version.
+     * This also means that none of those fields may be null. However, that doesn't eliminate the
+ * ability to have an empty string in place of a null.
+ */
+
+ assertEquals( "check failure cause", StringUtils.defaultString( metadata.getGroupId() ), result.getGroupId() );
+ assertEquals( "check failure cause", StringUtils.defaultString( metadata.getArtifactId() ), result
+ .getArtifactId() );
+ assertEquals( "check failure cause", StringUtils.defaultString( metadata.getBaseVersion() ), result
+ .getVersion() );
}
public void testMetadataMultipleFailures()
{
- reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
- reportingDatabase.addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
- assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "First Failure Reason" );
+ database.getMetadataDatabase().addFailure( metadata, PROCESSOR, PROBLEM, "Second Failure Reason" );
+ assertEquals( "failures count", 2, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getMetadataIterator();
+ Iterator failures = database.getMetadataIterator();
assertTrue( "check there is a failure", failures.hasNext() );
MetadataResults results = (MetadataResults) failures.next();
failures = results.getFailures().iterator();
public void testMetadataSingleWarning()
{
- reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "Single Warning Message" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 1, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getMetadataIterator();
+ Iterator warnings = database.getMetadataIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
MetadataResults results = (MetadataResults) warnings.next();
warnings = results.getWarnings().iterator();
public void testMetadataMultipleWarnings()
{
- reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
- reportingDatabase.addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "First Warning" );
+ database.getMetadataDatabase().addWarning( metadata, PROCESSOR, PROBLEM, "Second Warning" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 2, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getMetadataIterator();
+ Iterator warnings = database.getMetadataIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
MetadataResults results = (MetadataResults) warnings.next();
warnings = results.getWarnings().iterator();
public void testMetadataSingleNotice()
{
- reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
- assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "Single Notice Message" );
+ assertEquals( "failure count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check notices", 1, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getMetadataIterator();
+ Iterator warnings = database.getMetadataIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
MetadataResults results = (MetadataResults) warnings.next();
warnings = results.getNotices().iterator();
public void testMetadataMultipleNotices()
{
- reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
- reportingDatabase.addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
+ database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "First Notice" );
+ database.getMetadataDatabase().addNotice( metadata, PROCESSOR, PROBLEM, "Second Notice" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 2, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getMetadataIterator();
+ Iterator warnings = database.getMetadataIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
MetadataResults results = (MetadataResults) warnings.next();
warnings = results.getNotices().iterator();
public void testArtifactSingleFailure()
{
- reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
- assertEquals( "failures count", 1, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "Single Failure Reason" );
+ assertEquals( "failures count", 1, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getArtifactIterator();
assertTrue( "check there is a failure", failures.hasNext() );
ArtifactResults results = (ArtifactResults) failures.next();
failures = results.getFailures().iterator();
private void assertArtifact( ArtifactResults results )
{
- assertEquals( "check failure cause", artifact.getGroupId(), results.getGroupId() );
- assertEquals( "check failure cause", artifact.getArtifactId(), results.getArtifactId() );
- assertEquals( "check failure cause", artifact.getVersion(), results.getVersion() );
- assertEquals( "check failure cause", artifact.getClassifier(), results.getClassifier() );
- assertEquals( "check failure cause", artifact.getType(), results.getType() );
+ /* The funky StringUtils.defaultString() is used because of database constraints.
+ * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version,
+ * type, classifier.
+     * This also means that none of those fields may be null. However, that doesn't eliminate the
+ * ability to have an empty string in place of a null.
+ */
+
+ assertEquals( "check failure cause", StringUtils.defaultString( artifact.getGroupId() ), results.getGroupId() );
+ assertEquals( "check failure cause", StringUtils.defaultString( artifact.getArtifactId() ), results
+ .getArtifactId() );
+ assertEquals( "check failure cause", StringUtils.defaultString( artifact.getVersion() ), results.getVersion() );
+ assertEquals( "check failure cause", StringUtils.defaultString( artifact.getClassifier() ), results
+ .getClassifier() );
+ assertEquals( "check failure cause", StringUtils.defaultString( artifact.getType() ), results.getType() );
}
public void testArtifactMultipleFailures()
{
- reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
- reportingDatabase.addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
- assertEquals( "failures count", 2, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "First Failure Reason" );
+ database.getArtifactDatabase().addFailure( artifact, PROCESSOR, PROBLEM, "Second Failure Reason" );
+ assertEquals( "failures count", 2, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator failures = reportingDatabase.getArtifactIterator();
+ Iterator failures = database.getArtifactIterator();
assertTrue( "check there is a failure", failures.hasNext() );
ArtifactResults results = (ArtifactResults) failures.next();
failures = results.getFailures().iterator();
public void testArtifactSingleWarning()
{
- reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 1, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "Single Warning Message" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 1, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getArtifactIterator();
+ Iterator warnings = database.getArtifactIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
ArtifactResults results = (ArtifactResults) warnings.next();
warnings = results.getWarnings().iterator();
public void testArtifactMultipleWarnings()
{
- reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
- reportingDatabase.addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 2, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "First Warning" );
+ database.getArtifactDatabase().addWarning( artifact, PROCESSOR, PROBLEM, "Second Warning" );
+ assertEquals( "warnings count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 2, database.getNumWarnings() );
+ assertEquals( "check no notices", 0, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getArtifactIterator();
+ Iterator warnings = database.getArtifactIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
ArtifactResults results = (ArtifactResults) warnings.next();
warnings = results.getWarnings().iterator();
public void testArtifactSingleNotice()
{
- reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
- assertEquals( "failure count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check notices", 1, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "Single Notice Message" );
+ assertEquals( "failure count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check notices", 1, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getArtifactIterator();
+ Iterator warnings = database.getArtifactIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
ArtifactResults results = (ArtifactResults) warnings.next();
warnings = results.getNotices().iterator();
public void testArtifactMultipleNotices()
{
- reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
- reportingDatabase.addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
- assertEquals( "warnings count", 0, reportingDatabase.getNumFailures() );
- assertEquals( "warnings count", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 2, reportingDatabase.getNumNotices() );
+ database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "First Notice" );
+ database.getArtifactDatabase().addNotice( artifact, PROCESSOR, PROBLEM, "Second Notice" );
+ assertEquals( "failures count", 0, database.getNumFailures() );
+ assertEquals( "warnings count", 0, database.getNumWarnings() );
+ assertEquals( "check notices count", 2, database.getNumNotices() );
- Iterator warnings = reportingDatabase.getArtifactIterator();
+ Iterator warnings = database.getArtifactIterator();
assertTrue( "check there is a failure", warnings.hasNext() );
ArtifactResults results = (ArtifactResults) warnings.next();
warnings = results.getNotices().iterator();
assertFalse( "no more warnings", warnings.hasNext() );
}
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.addVersion( "1.0-alpha-2" );
-
- metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
- }
}
--- /dev/null
+<component-set>
+ <components>
+ <component>
+ <role>org.codehaus.plexus.jdo.JdoFactory</role>
+ <role-hint>archiva</role-hint>
+ <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+ </component>
+ </components>
+</component-set>
--- /dev/null
+# Set root logger level to DEBUG and its only appender to A1.
+log4j.rootLogger=WARN, A1
+
+# A1 is set to be a ConsoleAppender.
+log4j.appender.A1=org.apache.log4j.ConsoleAppender
+
+# A1 uses PatternLayout.
+log4j.appender.A1.layout=org.apache.log4j.PatternLayout
+log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+
<requirement>
<role>org.codehaus.plexus.digest.Digester</role>
<role-hint>md5</role-hint>
+ <field-name>digester</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory</role>
+ <field-name>indexFactory</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+ <field-name>database</field-name>
</requirement>
</requirements>
<configuration>
<role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
<role-hint>old-artifact</role-hint>
<implementation>org.apache.maven.archiva.reporting.processor.OldArtifactReportProcessor</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+ <field-name>database</field-name>
+ </requirement>
+ </requirements>
<configuration>
<maxAge>10</maxAge>
</configuration>
<role>org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor</role>
<role-hint>old-snapshot-artifact</role-hint>
<implementation>org.apache.maven.archiva.reporting.processor.OldSnapshotArtifactReportProcessor</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase</role>
+ <field-name>database</field-name>
+ </requirement>
+ </requirements>
<configuration>
<maxAge>3600</maxAge>
<maxSnapshots>2</maxSnapshots>
<artifactId>plexus-component-api</artifactId>
</dependency>
</dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>merge</id>
+ <goals>
+ <goal>merge-descriptors</goal>
+ </goals>
+ <configuration>
+ <descriptors>
+ <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+ <descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
</project>
--- /dev/null
+// Auto generated dot file from plexus-graphing-graphviz.
+digraph gid {
+
+ // Graph Defaults
+ graph [
+ rankdir="TB"
+ ];
+
+ // Node Defaults.
+ node [
+ fontname="Helvetica",
+ fontsize="8",
+ shape="box"
+ ];
+
+ // Edge Defaults.
+ edge [
+ arrowsize="0.8"
+ fontsize="8",
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_REPOSITORY_LAYER10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-repository-layer\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" [
+ label="org.apache.maven.archiva\narchiva-webapp\n1.0-SNAPSHOT\nwar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_APPLICATION10_SNAPSHOTPLEXUS_APPLICATION" [
+ label="org.apache.maven.archiva\narchiva-plexus-application\n1.0-SNAPSHOT\nplexus-application",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-core\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CONFIGURATION10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-configuration\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_PROXY10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-proxy\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-reports-standard\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CLI10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-cli\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CONVERTER10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-converter\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_APPLET10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-applet\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_RUNTIME10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-plexus-runtime\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-discoverer\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_SECURITY10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-security\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Node
+ "ORGAPACHEMAVENARCHIVAARCHIVA_INDEXER10_SNAPSHOTJAR" [
+ label="org.apache.maven.archiva\narchiva-indexer\n1.0-SNAPSHOT\njar",
+ fontsize="8",
+ shape=box
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CONVERTER10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_APPLET10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_APPLICATION10_SNAPSHOTPLEXUS_APPLICATION" -> "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_INDEXER10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CONFIGURATION10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_RUNTIME10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_PLEXUS_APPLICATION10_SNAPSHOTPLEXUS_APPLICATION" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CLI10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_PROXY10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_INDEXER10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_REPOSITORY_LAYER10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_DISCOVERER10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_PROXY10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_PROXY10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CONFIGURATION10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CONVERTER10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_REPORTS_STANDARD10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_CORE10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+
+ // Edge
+ "ORGAPACHEMAVENARCHIVAARCHIVA_WEBAPP10_SNAPSHOTWAR" -> "ORGAPACHEMAVENARCHIVAARCHIVA_SECURITY10_SNAPSHOTJAR" [
+ arrowtail=none,
+ arrowhead=normal
+ ];
+}
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-log4j-logging</artifactId>
- <version>1.1-alpha-2</version>
+ <artifactId>plexus-slf4j-logging</artifactId>
+ <version>1.1-alpha-1-SNAPSHOT</version>
<scope>runtime</scope>
</dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <version>1.2</version>
+ </dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-indexer</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-discoverer</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-configuration</artifactId>
+ <artifactId>archiva-proxy</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-proxy</artifactId>
+ <artifactId>archiva-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-core</artifactId>
+ <artifactId>archiva-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
+ <version>10.1.3.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<!DOCTYPE Configure PUBLIC "-//Mort Bay Consulting//DTD Configure//EN" "http://jetty.mortbay.org/configure.dtd">
<Configure class="org.mortbay.jetty.webapp.WebAppContext">
-<New id="validation_mail" class="org.mortbay.jetty.plus.naming.Resource">
- <Arg>mail/Session</Arg>
- <Arg>
- <New class="org.mortbay.naming.factories.MailSessionReference">
- <Set name="user"></Set>
- <Set name="password"></Set>
- <Set name="properties">
- <New class="java.util.Properties">
- <Put name="mail.smtp.host">localhost</Put>
- <Put name="mail.smtp.port">25</Put> <!-- TODO: shouldn't this default? -->
- </New>
- </Set>
- </New>
- </Arg>
-</New>
+ <New id="validation_mail" class="org.mortbay.jetty.plus.naming.Resource">
+ <Arg>mail/Session</Arg>
+ <Arg>
+ <New class="org.mortbay.naming.factories.MailSessionReference">
+ <Set name="user"></Set>
+ <Set name="password"></Set>
+ <Set name="properties">
+ <New class="java.util.Properties">
+ <Put name="mail.smtp.host">localhost</Put>
+ <Put name="mail.smtp.port">25</Put> <!-- TODO: shouldn't this default? -->
+ </New>
+ </Set>
+ </New>
+ </Arg>
+ </New>
+ <!-- Archiva database -->
+ <New id="archiva" class="org.mortbay.jetty.plus.naming.Resource">
+ <Arg>jdbc/archiva</Arg>
+ <Arg>
+ <New class="org.apache.derby.jdbc.EmbeddedDataSource">
+ <Set name="DatabaseName">target/databases/archiva</Set>
+ <Set name="user">sa</Set>
+ <Set name="createDatabase">create</Set>
+ </New>
+ </Arg>
+ </New>
+
+ <New id="archivaShutdown" class="org.mortbay.jetty.plus.naming.Resource">
+ <Arg>jdbc/archivaShutdown</Arg>
+ <Arg>
+ <New class="org.apache.derby.jdbc.EmbeddedDataSource">
+ <Set name="DatabaseName">target/databases/archiva</Set>
+ <Set name="user">sa</Set>
+ <Set name="shutdownDatabase">shutdown</Set>
+ </New>
+ </Arg>
+ </New>
+
+ <!-- Users / Security Database -->
<New id="users" class="org.mortbay.jetty.plus.naming.Resource">
<Arg>jdbc/users</Arg>
<Arg>
<New class="org.apache.derby.jdbc.EmbeddedDataSource">
- <Set name="DatabaseName">target/database</Set>
+ <Set name="DatabaseName">target/databases/users</Set>
<Set name="user">sa</Set>
<Set name="createDatabase">create</Set>
</New>
</Arg>
</New>
- <New id="shutdown" class="org.mortbay.jetty.plus.naming.Resource">
- <Arg>jdbc/shutdown</Arg>
+
+ <New id="usersShutdown" class="org.mortbay.jetty.plus.naming.Resource">
+ <Arg>jdbc/usersShutdown</Arg>
<Arg>
<New class="org.apache.derby.jdbc.EmbeddedDataSource">
- <Set name="DatabaseName">target/database</Set>
+ <Set name="DatabaseName">target/databases/users</Set>
<Set name="user">sa</Set>
<Set name="shutdownDatabase">shutdown</Set>
</New>
* under the License.
*/
-import com.opensymphony.xwork.Preparable;
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.archiva.configuration.RepositoryConfiguration;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter;
-import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter;
import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.executor.ReportExecutor;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.store.ReportingStoreException;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.codehaus.plexus.security.rbac.Resource;
import org.codehaus.plexus.security.ui.web.interceptor.SecureAction;
import org.codehaus.plexus.security.ui.web.interceptor.SecureActionBundle;
import org.codehaus.plexus.security.ui.web.interceptor.SecureActionException;
import org.codehaus.plexus.xwork.action.PlexusActionSupport;
-import java.util.ArrayList;
-import java.util.Iterator;
import java.util.List;
-import java.util.Map;
/**
* Repository reporting.
*/
public class ReportsAction
extends PlexusActionSupport
- implements Preparable, SecureAction
+ implements SecureAction
{
/**
* @plexus.requirement
*/
- private ArchivaConfiguration archivaConfiguration;
+ private ReportingDatabase database;
- /**
- * @plexus.requirement
- */
- private ConfiguredRepositoryFactory factory;
-
- private List databases;
-
- private String repositoryId;
-
- /**
- * @plexus.requirement
- */
- private ReportExecutor executor;
-
- private Configuration configuration;
-
- /**
- * @plexus.requirement role="org.apache.maven.archiva.reporting.group.ReportGroup"
- */
- private Map reports;
-
- private String reportGroup = DEFAULT_REPORT_GROUP;
-
- private static final String DEFAULT_REPORT_GROUP = "health";
-
- private String filter;
+ private List reports;
public String execute()
throws Exception
{
- ReportGroup reportGroup = (ReportGroup) reports.get( this.reportGroup );
-
- databases = new ArrayList();
-
- if ( repositoryId != null && !repositoryId.equals( "-" ) )
- {
- RepositoryConfiguration repositoryConfiguration = configuration.getRepositoryById( repositoryId );
- getReport( repositoryConfiguration, reportGroup );
- }
- else
- {
- for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
- {
- RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
- getReport( repositoryConfiguration, reportGroup );
- }
- }
- return SUCCESS;
- }
-
- private void getReport( RepositoryConfiguration repositoryConfiguration, ReportGroup reportGroup )
- throws ReportingStoreException
- {
- ArtifactRepository repository = factory.createRepository( repositoryConfiguration );
-
- ReportingDatabase database = executor.getReportDatabase( repository, reportGroup );
-
- if ( filter != null && !filter.equals( "-" ) )
- {
- database = database.getFilteredDatabase( filter );
- }
-
- databases.add( database );
- }
-
- public String runReport()
- throws Exception
- {
- ReportGroup reportGroup = (ReportGroup) reports.get( this.reportGroup );
-
- RepositoryConfiguration repositoryConfiguration = configuration.getRepositoryById( repositoryId );
- ArtifactRepository repository = factory.createRepository( repositoryConfiguration );
-
- ReportingDatabase database = executor.getReportDatabase( repository, reportGroup );
- if ( database.isInProgress() )
- {
- return SUCCESS;
- }
-
- generateReport( database, repositoryConfiguration, reportGroup, repository );
-
+ reports = database.getArtifactDatabase().getAllArtifactResults();
+
return SUCCESS;
}
- private void generateReport( ReportingDatabase database, RepositoryConfiguration repositoryConfiguration,
- ReportGroup reportGroup, ArtifactRepository repository )
- throws DiscovererException, ReportingStoreException
- {
- database.setInProgress( true );
-
- List blacklistedPatterns = new ArrayList();
- if ( repositoryConfiguration.getBlackListPatterns() != null )
- {
- blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() );
- }
- if ( configuration.getGlobalBlackListPatterns() != null )
- {
- blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() );
- }
-
- ArtifactFilter filter;
- if ( repositoryConfiguration.isIncludeSnapshots() )
- {
- filter = new AcceptAllArtifactFilter();
- }
- else
- {
- filter = new SnapshotArtifactFilter();
- }
-
- try
- {
- executor.runReports( reportGroup, repository, blacklistedPatterns, filter );
- }
- finally
- {
- database.setInProgress( false );
- }
- }
-
- public void setReportGroup( String reportGroup )
- {
- this.reportGroup = reportGroup;
- }
-
- public String getReportGroup()
- {
- return reportGroup;
- }
-
- public String getRepositoryId()
- {
- return repositoryId;
- }
-
- public void setRepositoryId( String repositoryId )
- {
- this.repositoryId = repositoryId;
- }
-
- public List getDatabases()
- {
- return databases;
- }
-
- public void prepare()
- throws Exception
- {
- configuration = archivaConfiguration.getConfiguration();
- }
-
- public Configuration getConfiguration()
- {
- return configuration;
- }
-
- public Map getReports()
- {
- return reports;
- }
-
- public String getFilter()
- {
- return filter;
- }
-
- public void setFilter( String filter )
- {
- this.filter = filter;
- }
-
public SecureActionBundle getSecureActionBundle()
throws SecureActionException
{
return bundle;
}
+
+ public List getReports()
+ {
+ return reports;
+ }
}
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord;
import org.apache.maven.archiva.proxy.ProxyException;
+import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;
import org.apache.maven.archiva.web.util.VersionMerger;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
* @plexus.requirement
*/
private DependencyTreeBuilder dependencyTreeBuilder;
+
+ /**
+ * @plexus.requirement
+ */
+ ArtifactResultsDatabase artifactsDatabase;
private String groupId;
private String artifactPath;
private List mailingLists;
+
+ private List reports;
public String artifact()
throws IOException, XmlPullParserException, ProjectBuildingException, ResourceDoesNotExistException,
return SUCCESS;
}
+
+ public String reports()
+ throws IOException, XmlPullParserException, ProjectBuildingException
+ {
+ if ( !checkParameters() )
+ {
+ return ERROR;
+ }
+
+ System.out.println("#### In reports.");
+ this.reports = artifactsDatabase.findArtifactResults( groupId, artifactId, version );
+ System.out.println("#### Found " + reports.size() + " reports.");
+
+ return SUCCESS;
+ }
public String dependees()
throws IOException, XmlPullParserException, ProjectBuildingException, RepositoryIndexException,
{
return repositoryUrlName;
}
+
+ public List getReports()
+ {
+ return reports;
+ }
}
import com.opensymphony.xwork.ModelDriven;
import com.opensymphony.xwork.Preparable;
import com.opensymphony.xwork.Validateable;
+
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.InvalidConfigurationException;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor;
+import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.codehaus.plexus.registry.RegistryException;
import org.codehaus.plexus.scheduler.CronExpressionValidator;
private ArchivaConfiguration archivaConfiguration;
/**
- * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
+ * @plexus.requirement
*/
- private IndexerTaskExecutor indexer;
+ private ActiveManagedRepositories activeRepositories;
/**
* The configuration.
*/
private Configuration configuration;
-
+
private CronExpressionValidator cronValidator;
private String second = "0";
{
// TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
// TODO: if this is changed, do we move the index or recreate it?
- configuration.setIndexerCronExpression( getCronExpression() );
+ configuration.setDataRefreshCronExpression( getCronExpression() );
// Normalize the path
File file = new File( configuration.getIndexPath() );
public String input()
{
- String[] cronEx = configuration.getIndexerCronExpression().split( " " );
+ String[] cronEx = configuration.getDataRefreshCronExpression().split( " " );
int i = 0;
while ( i < cronEx.length )
i++;
}
- if ( indexer.getLastIndexingTime() != 0 )
+ if ( activeRepositories.getLastDataRefreshTime() != 0 )
{
- lastIndexingTime = new Date( indexer.getLastIndexingTime() ).toString();
+ lastIndexingTime = new Date( activeRepositories.getLastDataRefreshTime() ).toString();
}
else
{
*/
import org.apache.maven.archiva.scheduler.RepositoryTaskScheduler;
-import org.apache.maven.archiva.scheduler.TaskExecutionException;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.codehaus.plexus.security.rbac.Resource;
import org.codehaus.plexus.security.ui.web.interceptor.SecureAction;
import org.codehaus.plexus.security.ui.web.interceptor.SecureActionBundle;
import org.codehaus.plexus.security.ui.web.interceptor.SecureActionException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.xwork.action.PlexusActionSupport;
/**
*/
private RepositoryTaskScheduler taskScheduler;
- public String runIndexer()
+ public String runRefresh()
throws TaskExecutionException
{
- taskScheduler.runIndexer();
+ taskScheduler.runDataRefresh();
return SUCCESS;
}
/**
* AuditLog - Audit Log.
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.web.repository.AuditLog"
/**
* ProxiedDavServer
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
* @plexus.component role="org.codehaus.plexus.webdav.DavServerComponent"
* role-hint="proxied"
/**
* RepositoryServlet
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class RepositoryServlet
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.artifact.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
import org.apache.maven.project.ProjectBuildingException;
/**
* DownloadArtifact
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role="com.opensymphony.webwork.components.Component" role-hint="download-artifact"
/**
* DownloadArtifactTag
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class DownloadArtifactTag
/**
* ExpressionTool
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class ExpressionTool
/**
* GroupIdLink
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class GroupIdLink
/**
* GroupIdLink
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class GroupIdLinkTag
/**
* PlexusTagUtil
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class PlexusTagUtil
</configuration>
</component>
+ <component>
+ <role>org.codehaus.plexus.jdo.JdoFactory</role>
+ <role-hint>archiva</role-hint>
+ <implementation>org.codehaus.plexus.jdo.DataSourceConfigurableJdoFactory</implementation>
+ <configuration>
+
+ <connectionFactoryName>java:comp/env/jdbc/archiva</connectionFactoryName>
+ <shutdownConnectionFactoryName>java:comp/env/jdbc/archivaShutdown</shutdownConnectionFactoryName>
+
+ <!-- JPOX and JDO configuration -->
+ <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+ <otherProperties>
+ <property>
+ <name>org.jpox.autoCreateSchema</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>org.jpox.validateTables</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateConstraints</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateColumns</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.autoStartMechanism</name>
+ <value>None</value>
+ </property>
+ <property>
+ <name>org.jpox.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.poid.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.rdbms.dateTimezone</name>
+ <value>JDK_DEFAULT_TIMEZONE</value>
+ </property>
+ <!-- NEEDED FOR POSTGRES, But causes problems in other JDBC implementations.
+ <property>
+ <name>org.jpox.identifier.case</name>
+ <value>PreserveCase</value>
+ </property>
+ -->
+ </otherProperties>
+ </configuration>
+ </component>
+
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>users</role-hint>
<name>org.jpox.rdbms.dateTimezone</name>
<value>JDK_DEFAULT_TIMEZONE</value>
</property>
-<!--
+ <!-- NEEDED FOR POSTGRES, But causes problems in other JDBC implementations.
<property>
<name>org.jpox.identifier.case</name>
<value>PreserveCase</value>
</property>
--->
+ -->
</otherProperties>
</configuration>
</component>
-->
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
- <implementation>org.codehaus.plexus.logging.log4j.Log4JLoggerManager</implementation>
+ <implementation>org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
-
- <configuration>
- <threshold>WARN</threshold>
- <default-appender>console,rolling</default-appender>
-
- <appenders>
- <appender>
- <id>console</id>
- <threshold>DEBUG</threshold>
- <type>org.apache.log4j.ConsoleAppender</type>
- <conversion-pattern>%d [%t] %-5p %-30c{1} - %m%n</conversion-pattern>
- </appender>
-
- <appender>
- <id>rolling</id>
- <threshold>DEBUG</threshold>
- <type>org.apache.log4j.DailyRollingFileAppender</type>
- <conversion-pattern>%-4r [%t] %-5p %c %x - %m%n</conversion-pattern>
-
- <properties>
- <property>
- <name>file</name>
- <value>${appserver.base}/logs/archiva.log</value>
- </property>
- <property>
- <name>append</name>
- <value>true</value>
- </property>
- <property>
- <name>datePattern</name>
- <value>'.'yyyy-MM-dd</value>
- </property>
- </properties>
- </appender>
- </appenders>
-
- <levels>
- <!-- Help identify bugs during testing -->
- <level>
- <hierarchy>org.apache.maven</hierarchy>
- <level>INFO</level>
- </level>
- <level>
- <hierarchy>org.codehaus.plexus.security</hierarchy>
- <level>INFO</level>
- </level>
- <!-- squelch noisy objects (for now) -->
- <level>
- <hierarchy>org.codehaus.plexus.mailsender.MailSender</hierarchy>
- <level>INFO</level>
- </level>
- <level>
- <hierarchy>org.quartz</hierarchy>
- <level>INFO</level>
- </level>
- <level>
- <hierarchy>org.apache.jasper</hierarchy>
- <level>INFO</level>
- </level>
- <level>
- <hierarchy>com.opensymphony.xwork</hierarchy>
- <level>INFO</level>
- </level>
- <level>
- <hierarchy>com.opensymphony.webwork</hierarchy>
- <level>INFO</level>
- </level>
- <level>
- <hierarchy>org.codehaus.plexus.PlexusContainer</hierarchy>
- <level>INFO</level>
- </level>
- <level>
- <hierarchy>JPOX</hierarchy>
- <level>WARN</level>
- </level>
- <level>
- <hierarchy>JPOX.MetaData</hierarchy>
- <level>ERROR</level>
- </level>
- <!--
- <level>
- <hierarchy>JPOX.RDBMS.SQL</hierarchy>
- <level>DEBUG</level>
- </level>
- -->
- <level>
- <hierarchy>freemarker</hierarchy>
- <level>WARN</level>
- </level>
- </levels>
- </configuration>
</component>
-
</components>
<load-on-start>
</component>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
- <role-hint>indexer</role-hint>
+ <role-hint>data-refresh</role-hint>
</component>
</load-on-start>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" ?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+
+ <appender name="rolling" class="org.apache.log4j.DailyRollingFileAppender">
+ <param name="file" value="${appserver.base}/logs/archiva.log" />
+ <param name="append" value="true" />
+ <param name="datePattern" value="'.'yyyy-MM-dd" />
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%-4r [%t] %-5p %c %x - %m%n"/>
+ </layout>
+ </appender>
+
+ <appender name="console" class="org.apache.log4j.ConsoleAppender">
+ <param name="Target" value="System.out"/>
+ <layout class="org.apache.log4j.PatternLayout">
+ <param name="ConversionPattern" value="%d [%t] %-5p %-30c{1} - %m%n"/>
+ </layout>
+ </appender>
+
+ <!-- Help identify bugs during testing -->
+ <logger name="org.apache.maven">
+ <level value="info"/>
+ </logger>
+
+ <logger name="org.codehaus.plexus.security">
+ <level value="info"/>
+ </logger>
+
+ <!-- squelch noisy objects (for now) -->
+ <logger name="org.codehaus.plexus.mailsender.MailSender">
+ <level value="info"/>
+ </logger>
+
+ <logger name="org.quartz">
+ <level value="info"/>
+ </logger>
+
+ <logger name="org.apache.jasper">
+ <level value="info"/>
+ </logger>
+
+ <logger name="com.opensymphony.xwork">
+ <level value="info"/>
+ </logger>
+
+ <logger name="com.opensymphony.webwork">
+ <level value="info"/>
+ </logger>
+
+ <logger name="org.codehaus.plexus.PlexusContainer">
+ <level value="info"/>
+ </logger>
+
+ <logger name="JPOX">
+ <level value="warn"/>
+ </logger>
+
+ <logger name="JPOX.MetaData">
+ <level value="error"/>
+ </logger>
+
+<!--
+ <logger name="JPOX.RDBMS.SQL">
+ <level value="debug"/>
+ </logger>
+ -->
+
+ <logger name="freemarker">
+ <level value="warn"/>
+ </logger>
+
+ <root>
+ <priority value ="warn" />
+ <appender-ref ref="console" />
+ <appender-ref ref="rolling" />
+ </root>
+
+</log4j:configuration>
<param name="actionName">login</param>
<param name="namespace">/security</param>
</result>
+ <result name="requires-authorization" type="redirect-action">
+ <param name="actionName">login</param>
+ <param name="namespace">/security</param>
+ </result>
<result name="security-register-success" type="redirect-action">
<param name="actionName">login</param>
<param name="namespace">/security</param>
<result>/WEB-INF/jsp/showArtifact.jsp</result>
</action>
+ <action name="showArtifactReports" class="showArtifactAction" method="reports">
+ <result>/WEB-INF/jsp/showArtifact.jsp</result>
+ </action>
+
<action name="showArtifactDependencies" class="showArtifactAction" method="dependencies">
<result>/WEB-INF/jsp/showArtifact.jsp</result>
</action>
--- /dev/null
+<%--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ --%>
+
+<%@ taglib prefix="ww" uri="/webwork" %>
+<%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %>
+<%@ taglib prefix="my" tagdir="/WEB-INF/tags" %>
+
+<c:forEach items="${reports}" var="report">
+ <h3>
+ ${report.groupId} : ${report.artifactId} : ${report.version} : ${report.classifier} : ${report.type}
+ </h3>
+ <ul>
+    <c:forEach items="${report.results}" var="result">
+ <li>
+ <b>${result.reason}</b>
+ </li>
+ </c:forEach>
+ </ul>
+</c:forEach>
+<c:if test="${empty(reports)}">
+ <strong>No reports for this artifact.</strong>
+</c:if>
<html>
<head>
- <ww:set name="reports" value="reports"/>
- <ww:set name="reportGroup" value="reportGroup"/>
- <title>Report: ${reports[reportGroup].name}</title>
+ <title>Reports</title>
<ww:head/>
</head>
<div id="contentArea">
-<pss:ifAnyAuthorized permissions="archiva-access-reports">
- <ww:form action="reports" namespace="/admin">
- <ww:select list="reports" label="Report" name="reportGroup" onchange="document.reports.submit();"/>
- <ww:select list="configuration.repositories" listKey="id" listValue="name" label="Repository" headerKey="-"
- headerValue="(All repositories)" name="repositoryId" onchange="document.reports.submit();"/>
- <ww:select list="reports[reportGroup].reports" label="Filter" headerKey="-" headerValue="(All Problems)"
- name="filter" onchange="document.reports.submit();"/>
- <ww:submit value="Get Report"/>
- </ww:form>
-</pss:ifAnyAuthorized>
-
-<ww:set name="databases" value="databases"/>
-<c:forEach items="${databases}" var="database">
-<div>
-<div style="float: right">
- <%-- TODO!
- <a href="#">Repair all</a>
- |
- --%>
- <c:choose>
- <c:when test="${!database.inProgress}">
- <pss:ifAuthorized permission="archiva-access-reports">
- <ww:url id="regenerateReportUrl" action="runReport" namespace="/admin">
- <ww:param name="repositoryId">${database.repository.id}</ww:param>
- <ww:param name="reportGroup" value="reportGroup"/>
- </ww:url>
- <ww:a href="%{regenerateReportUrl}">Regenerate Report</ww:a>
- </pss:ifAuthorized>
- </c:when>
- <c:otherwise>
- <!-- TODO: would be good to have a generic task/job mechanism that tracked progress and ability to run
- concurrently -->
- <span style="color: gray;">Report in progress</span>
- </c:otherwise>
- </c:choose>
-</div>
-<h2>Repository: ${database.repository.name}</h2>
-
-<p>
- <c:choose>
- <c:when test="${!empty(database.reporting.lastModified)}">
- Status:
- <img src="<c:url value="/images/icon_error_sml.gif"/>" width="15" height="15" alt=""/>
- ${database.numFailures}
- <img src="<c:url value="/images/icon_warning_sml.gif"/>" width="15" height="15" alt=""/>
- ${database.numWarnings}
- <img src="<c:url value="/images/icon_info_sml.gif"/>" width="15" height="15" alt=""/>
- ${database.numNotices}
-
- <span style="font-size: x-small">
- <jsp:useBean id="date" class="java.util.Date"/>
- <c:set target="${date}" property="time" value="${database.reporting.lastModified}"/>
- Last updated: <fmt:formatDate type="both" value="${date}"/>,
- execution time: <fmt:formatNumber maxFractionDigits="0" value="${database.reporting.executionTime / 60000}"/> minutes
- <fmt:formatNumber maxFractionDigits="0" value="${(database.reporting.executionTime / 1000) % 60}"/> seconds
- </span>
- </c:when>
- <c:otherwise>
- <b>
- This report has not yet been generated. <a href="${url}">Generate Report</a>
- </b>
- </c:otherwise>
- </c:choose>
-</p>
-
- <%-- TODO need to protect iterations against concurrent modification exceptions by cloning the lists synchronously --%>
- <%-- TODO! paginate (displaytag?) --%>
-<c:if test="${!empty(database.reporting.artifacts)}">
- <h3>Artifacts</h3>
- <c:forEach items="${database.reporting.artifacts}" var="artifact" begin="0" end="2">
- <ul>
- <c:forEach items="${artifact.failures}" var="result">
- <li class="errorBullet">${result.reason}</li>
- </c:forEach>
- <c:forEach items="${artifact.warnings}" var="result">
- <li class="warningBullet">${result.reason}</li>
- </c:forEach>
- <c:forEach items="${artifact.notices}" var="result">
- <li class="infoBullet">${result.reason}</li>
- </c:forEach>
- </ul>
- <p style="text-indent: 3em;">
- <my:showArtifactLink groupId="${artifact.groupId}" artifactId="${artifact.artifactId}"
- version="${artifact.version}" classifier="${artifact.classifier}"/>
- </p>
- <%-- TODO!
- <td>
- <a href="#">Repair</a>
- </td>
- --%>
- </c:forEach>
- <c:if test="${fn:length(database.reporting.artifacts) gt 3}">
- <p>
- <b>... more ...</b>
- </p>
- </c:if>
-</c:if>
-<c:if test="${!empty(database.metadataWithProblems)}">
- <h3>Metadata</h3>
- <c:forEach items="${database.metadataWithProblems}" var="metadata" begin="0" end="2">
- <ul>
- <c:forEach items="${metadata.failures}" var="result">
- <li class="errorBullet">${result.reason}</li>
- </c:forEach>
- <c:forEach items="${metadata.warnings}" var="result">
- <li class="warningBullet">${result.reason}</li>
- </c:forEach>
- <c:forEach items="${metadata.notices}" var="result">
- <li class="infoBullet">${result.reason}</li>
- </c:forEach>
- </ul>
- <p style="text-indent: 3em;">
- <my:showArtifactLink groupId="${metadata.groupId}" artifactId="${metadata.artifactId}"
- version="${metadata.version}"/>
- </p>
- <%-- TODO!
- <td>
- <a href="#">Repair</a>
- </td>
- --%>
- </c:forEach>
- <c:if test="${fn:length(database.metadataWithProblems) gt 3}">
- <p>
- <b>... more ...</b>
- </p>
- </c:if>
-</c:if>
-</div>
+<c:forEach items="${reports}" var="report">
+ <h3>
+ ${report.groupId} : ${report.artifactId} : ${report.version} : ${report.classifier} : ${report.type}
+ </h3>
+ <ul>
+    <c:forEach items="${report.results}" var="result">
+ <li>
+ <b>${result.reason}</b>
+ </li>
+ </c:forEach>
+ </ul>
</c:forEach>
+<c:if test="${empty(reports)}">
+ <strong>No reports for any artifact.</strong>
+</c:if>
+
</div>
</body>
<%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %>
<%@ taglib prefix="my" tagdir="/WEB-INF/tags" %>
<%@ taglib prefix="archiva" uri="http://maven.apache.org/archiva" %>
+<%@ taglib prefix="pss" uri="/plexusSecuritySystem" %>
<html>
<head>
</ww:url>
</c:set>
<my:currentWWUrl url="${url}">Mailing Lists</my:currentWWUrl>
+ <pss:ifAnyAuthorized permissions="archiva-access-reports">
+ <c:set var="url">
+ <ww:url action="showArtifactReports">
+ <ww:param name="groupId" value="%{groupId}"/>
+ <ww:param name="artifactId" value="%{artifactId}"/>
+ <ww:param name="version" value="%{version}"/>
+ </ww:url>
+ </c:set>
+ <my:currentWWUrl url="${url}">Reports</my:currentWWUrl>
+ </pss:ifAnyAuthorized>
+
</span>
</div>
<c:when test="${mailingLists != null}">
<%@ include file="/WEB-INF/jsp/include/mailingLists.jspf" %>
</c:when>
+ <c:when test="${reports != null}">
+ <%@ include file="/WEB-INF/jsp/include/artifactReports.jspf" %>
+ </c:when>
<c:otherwise>
<%@ include file="/WEB-INF/jsp/include/artifactInfo.jspf" %>
</c:otherwise>
<version>4</version>
<relativePath>../pom/maven/pom.xml</relativePath>
</parent>
+ <prerequisites>
+ <maven>2.0.5</maven>
+ </prerequisites>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva</artifactId>
<packaging>pom</packaging>
<version>1.3.3</version>
<executions>
<execution>
+ <id>generate</id>
<goals>
<goal>descriptor</goal>
- <goal>merge-descriptors</goal>
</goals>
</execution>
</executions>
<modules>
<module>archiva-applet</module>
<module>archiva-converter</module>
+ <module>archiva-common</module>
<module>archiva-discoverer</module>
<module>archiva-reports-standard</module>
<module>archiva-indexer</module>
<artifactId>maven-app-configuration-web</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
- <!--
- Rejected Plexus Container / Component Versions:
- 1.0-alpha-11
- 2007-01-17 11:40:40.371::WARN: Failed startup of context org.mortbay.jetty.webapp.WebAppContext@553763
- {/,/home/joakim/code/maven/trunks/archiva/archiva-webapp/src/main/webapp}
- java.lang.NullPointerException
- at org.codehaus.plexus.classworlds.strategy.DefaultStrategy.getResource(DefaultStrategy.java:99)
- at org.codehaus.plexus.classworlds.strategy.ForeignStrategy.getResource(ForeignStrategy.java:54)
- at org.codehaus.plexus.classworlds.strategy.DefaultStrategy.getResourceAsStream(DefaultStrategy.java:107)
- at org.codehaus.plexus.classworlds.realm.ClassRealm.getResourceAsStream(ClassRealm.java:207)
- at org.codehaus.plexus.DefaultPlexusContainer.<init>(DefaultPlexusContainer.java:244)
-
- 1.0-alpha-12
- 1.0-alpha-13
- 1.0-alpha-14
- Caused by: org.codehaus.plexus.PlexusContainerException: The specified user configuration
- 'file:/home/joakim/code/maven/trunks/archiva/archiva-webapp/src/main/webapp/WEB-INF/classes/META-INF/plexus/application.xml' is null.
-
- 1.0-alpha-15
- The resolution of ${configuration.store.file} is never attempted.
-
- 1.0-alpha-16-SNAPSHOT
- Incompatible with plexus-xwork-integration
- -->
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-container-default</artifactId>
<artifactId>wagon-http-lightweight</artifactId>
<version>${wagon.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.maven.archiva</groupId>
+ <artifactId>archiva-common</artifactId>
+ <version>${pom.version}</version>
+ </dependency>
+ <!--
+ <dependency>
+ <groupId>org.apache.maven.archiva</groupId>
+ <artifactId>archiva-common</artifactId>
+ <version>${pom.version}</version>
+ <classifier>tests</classifier>
+ <scope>test</scope>
+ </dependency>
+ -->
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-core</artifactId>
</build>
</profile>
</profiles>
- <!-- TODO: remove once modello is released -->
- <pluginRepositories>
- <pluginRepository>
- <id>codehaus.org</id>
- <url>http://snapshots.repository.codehaus.org</url>
- </pluginRepository>
- </pluginRepositories>
- <!-- TODO: remove once ehcache, p-sec, registry, webdav, xwork, naming released -->
<repositories>
<repository>
<id>codehaus.org</id>
+ <url>http://repository.codehaus.org</url>
+ <releases>
+ <enabled>true</enabled>
+ </releases>
+ <snapshots>
+ <enabled>false</enabled>
+ </snapshots>
+ </repository>
+ <!-- TODO: remove once ehcache, p-sec, registry, webdav, xwork, naming released -->
+ <repository>
+ <id>snapshots.codehaus.org</id>
<url>http://snapshots.repository.codehaus.org</url>
<releases>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
+ <!-- TODO: remove once modello is released -->
+ <pluginRepositories>
+ <pluginRepository>
+ <id>snapshots.codehaus.org</id>
+ <url>http://snapshots.repository.codehaus.org</url>
+ <releases>
+ <enabled>false</enabled>
+ </releases>
+ <snapshots>
+ <enabled>true</enabled>
+ </snapshots>
+ </pluginRepository>
+ </pluginRepositories>
<properties>
<maven.version>2.0.5</maven.version>
<wagon.version>1.0-beta-2</wagon.version>