From: Joakim Erdfelt Date: Fri, 23 Feb 2007 19:05:21 +0000 (+0000) Subject: Merge from archiva-MRM-239 branch to trunk. r506385:HEAD X-Git-Tag: archiva-0.9-alpha-1~99 X-Git-Url: https://source.dussan.org/?a=commitdiff_plain;h=dee0d5a300ee0ba0240efc1277428b52cdcec9e0;p=archiva.git Merge from archiva-MRM-239 branch to trunk. r506385:HEAD git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@511053 13f79535-47bb-0310-9956-ffa450edef68 --- diff --git a/archiva-cli/pom.xml b/archiva-cli/pom.xml index 9a25de821..17a2b9e59 100644 --- a/archiva-cli/pom.xml +++ b/archiva-cli/pom.xml @@ -30,7 +30,7 @@ org.apache.maven.archiva - archiva-core + archiva-converter org.codehaus.plexus diff --git a/archiva-cli/src/main/java/org/apache/maven/archiva/cli/ArchivaCli.java b/archiva-cli/src/main/java/org/apache/maven/archiva/cli/ArchivaCli.java index f9ba5cc9e..26ee4db77 100644 --- a/archiva-cli/src/main/java/org/apache/maven/archiva/cli/ArchivaCli.java +++ b/archiva-cli/src/main/java/org/apache/maven/archiva/cli/ArchivaCli.java @@ -23,9 +23,8 @@ import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.lang.StringUtils; -import org.apache.maven.archiva.conversion.LegacyRepositoryConverter; import org.apache.maven.archiva.converter.RepositoryConversionException; -import org.apache.maven.archiva.discoverer.DiscovererException; +import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter; import org.codehaus.plexus.PlexusContainer; import org.codehaus.plexus.tools.cli.AbstractCli; @@ -103,28 +102,25 @@ public class ArchivaCli System.out.println( "Converting " + oldRepositoryPath + " to " + newRepositoryPath ); - List blacklistedPatterns = null; + List fileExclusionPatterns = null; String s = p.getProperty( BLACKLISTED_PATTERNS ); if ( s != null ) { - blacklistedPatterns = Arrays.asList( StringUtils.split( s, "," ) ); + fileExclusionPatterns = Arrays.asList( StringUtils.split( s, "," ) ); } try { legacyRepositoryConverter.convertLegacyRepository( oldRepositoryPath, newRepositoryPath, - blacklistedPatterns, true ); + fileExclusionPatterns, + true ); } catch ( RepositoryConversionException e ) { showFatalError( "Error converting repository.", e, true ); } - catch ( DiscovererException e ) - { - showFatalError( "Error discovery artifacts to convert.", e, true ); - } } } } diff --git a/archiva-common/pom.xml b/archiva-common/pom.xml new file mode 100644 index 000000000..47d3c6223 --- /dev/null +++ b/archiva-common/pom.xml @@ -0,0 +1,96 @@ + + + + + + archiva + org.apache.maven.archiva + 1.0-SNAPSHOT + + 4.0.0 + archiva-common + Archiva Common + + + + org.codehaus.plexus + plexus-component-api + + + org.apache.maven + maven-artifact-manager + + + org.apache.maven + maven-project + + + commons-lang + commons-lang + + + org.codehaus.plexus + plexus-container-default + + + + + + + org.codehaus.plexus + plexus-maven-plugin + + + + + diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/ArchivaException.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/ArchivaException.java new file mode 100644 index 000000000..c807d70c4 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/ArchivaException.java @@ -0,0 +1,40 @@ +package org.apache.maven.archiva.common; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * ArchivaException + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ArchivaException + extends Exception +{ + public ArchivaException( String message, Throwable cause ) + { + super( message, cause ); + } + + public ArchivaException( String message ) + { + super( message ); + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilder.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilder.java new file mode 100644 index 000000000..b77826ad1 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilder.java @@ -0,0 +1,55 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.factory.ArtifactFactory; + +/** + * AbstractLayoutArtifactBuilder + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class AbstractLayoutArtifactBuilder + implements LayoutArtifactBuilder +{ + /** + * @plexus.requirement + */ + protected ArtifactFactory artifactFactory; + + /** + * Constructor used by plexus + */ + public AbstractLayoutArtifactBuilder() + { + + } + + /** + * Constructor used by manual process. + * + * @param artifactFactory the artifact factory to use. 
+ */ + public AbstractLayoutArtifactBuilder( ArtifactFactory artifactFactory ) + { + this.artifactFactory = artifactFactory; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/BuilderException.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/BuilderException.java new file mode 100644 index 000000000..0845dc750 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/BuilderException.java @@ -0,0 +1,43 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.ArchivaException; + +/** + * BuilderException - used to indicate a problem during the building of an object from file. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class BuilderException + extends ArchivaException +{ + + public BuilderException( String message, Throwable cause ) + { + super( message, cause ); + } + + public BuilderException( String message ) + { + super( message ); + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilder.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilder.java new file mode 100644 index 000000000..bfee01508 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilder.java @@ -0,0 +1,218 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.factory.ArtifactFactory; +import org.codehaus.plexus.util.StringUtils; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.StringTokenizer; + +/** + * DefaultLayoutArtifactBuilder - artifact builder for default layout repositories. 
+ * + * @author Brett Porter + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder" + * role-hint="default" + */ +public class DefaultLayoutArtifactBuilder + extends AbstractLayoutArtifactBuilder + implements LayoutArtifactBuilder +{ + public DefaultLayoutArtifactBuilder() + { + super(); + } + + public DefaultLayoutArtifactBuilder( ArtifactFactory artifactFactory ) + { + super( artifactFactory ); + } + + public Artifact build( String pathToArtifact ) + throws BuilderException + { + if( artifactFactory == null ) + { + throw new IllegalStateException( "Unable to build artifact with a null artifactFactory." ); + } + + List pathParts = new ArrayList(); + StringTokenizer st = new StringTokenizer( pathToArtifact, "/\\" ); + while ( st.hasMoreTokens() ) + { + pathParts.add( st.nextToken() ); + } + + Collections.reverse( pathParts ); + + Artifact artifact; + if ( pathParts.size() >= 4 ) + { + // maven 2.x path + + // the actual artifact filename. + String filename = (String) pathParts.remove( 0 ); + + // the next one is the version. + String version = (String) pathParts.remove( 0 ); + + // the next one is the artifactId. + String artifactId = (String) pathParts.remove( 0 ); + + // the remaining are the groupId. + Collections.reverse( pathParts ); + String groupId = StringUtils.join( pathParts.iterator(), "." ); + + String remainingFilename = filename; + if ( remainingFilename.startsWith( artifactId + "-" ) ) + { + remainingFilename = remainingFilename.substring( artifactId.length() + 1 ); + + String classifier = null; + + // TODO: use artifact handler, share with legacy discoverer + String type; + if ( remainingFilename.endsWith( ".tar.gz" ) ) + { + type = "distribution-tgz"; + remainingFilename = remainingFilename + .substring( 0, remainingFilename.length() - ".tar.gz".length() ); + } + else if ( remainingFilename.endsWith( ".zip" ) ) + { + type = "distribution-zip"; + remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() ); + } + else if ( remainingFilename.endsWith( "-test-sources.jar" ) ) + { + type = "java-source"; + classifier = "test-sources"; + remainingFilename = remainingFilename.substring( 0, remainingFilename.length() + - "-test-sources.jar".length() ); + } + else if ( remainingFilename.endsWith( "-sources.jar" ) ) + { + type = "java-source"; + classifier = "sources"; + remainingFilename = remainingFilename.substring( 0, remainingFilename.length() + - "-sources.jar".length() ); + } + else + { + int index = remainingFilename.lastIndexOf( "." ); + if ( index >= 0 ) + { + type = remainingFilename.substring( index + 1 ); + remainingFilename = remainingFilename.substring( 0, index ); + } + else + { + throw new BuilderException( "Path filename does not have an extension." 
); + } + } + + Artifact result; + if ( classifier == null ) + { + result = artifactFactory + .createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type ); + } + else + { + result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, + classifier ); + } + + if ( result.isSnapshot() ) + { + // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b + int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 ); + if ( classifierIndex >= 0 ) + { + classifier = remainingFilename.substring( classifierIndex + 1 ); + remainingFilename = remainingFilename.substring( 0, classifierIndex ); + result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename, + type, classifier ); + } + else + { + result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename, + Artifact.SCOPE_RUNTIME, type ); + } + + // poor encapsulation requires we do this to populate base version + if ( !result.isSnapshot() ) + { + throw new BuilderException( "Failed to create a snapshot artifact: " + result ); + } + else if ( !result.getBaseVersion().equals( version ) ) + { + throw new BuilderException( + "Built snapshot artifact base version does not match path version: " + + result.getBaseVersion() + "; should have been version: " + + version ); + } + else + { + artifact = result; + } + } + else if ( !remainingFilename.startsWith( version ) ) + { + throw new BuilderException( "Built artifact version does not match path version" ); + } + else if ( !remainingFilename.equals( version ) ) + { + if ( remainingFilename.charAt( version.length() ) == '-' ) + { + classifier = remainingFilename.substring( version.length() + 1 ); + artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, + classifier ); + } + else + { + throw new BuilderException( "Path version does not corresspond to an artifact version" ); + } + } + else + { + artifact = result; + } + } + else + { + throw new BuilderException( "Path filename does not correspond to an artifact." ); + } + } + else + { + throw new BuilderException( "Path is too short to build an artifact from." ); + } + + return artifact; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LayoutArtifactBuilder.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LayoutArtifactBuilder.java new file mode 100644 index 000000000..494a4a7c1 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LayoutArtifactBuilder.java @@ -0,0 +1,36 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; + +/** + * LayoutArtifactBuilder + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @todo this concept should really exist inside of the {@link ArtifactRepositoryLayout} object in maven itself. + */ +public interface LayoutArtifactBuilder +{ + public Artifact build( String pathToArtifact ) throws BuilderException; +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilder.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilder.java new file mode 100644 index 000000000..e3436e966 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilder.java @@ -0,0 +1,303 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.factory.ArtifactFactory; + +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.StringTokenizer; + +/** + * LegacyLayoutArtifactBuilder + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder" + * role-hint="legacy" + */ +public class LegacyLayoutArtifactBuilder + extends AbstractLayoutArtifactBuilder + implements LayoutArtifactBuilder +{ + public LegacyLayoutArtifactBuilder() + { + super(); + } + + public LegacyLayoutArtifactBuilder( ArtifactFactory artifactFactory ) + { + super( artifactFactory ); + } + + public Artifact build( String pathToArtifact ) + throws BuilderException + { + if( artifactFactory == null ) + { + throw new IllegalStateException( "Unable to build legacy artifact with a null artifactFactory." ); + } + + StringTokenizer tokens = new StringTokenizer( pathToArtifact, "/\\" ); + + Artifact result; + + int numberOfTokens = tokens.countTokens(); + + if ( numberOfTokens == 3 ) + { + String groupId = tokens.nextToken(); + + String type = tokens.nextToken(); + + if ( type.endsWith( "s" ) ) + { + type = type.substring( 0, type.length() - 1 ); + + // contains artifactId, version, classifier, and extension. 
+ String avceGlob = tokens.nextToken(); + + //noinspection CollectionDeclaredAsConcreteClass + LinkedList avceTokenList = new LinkedList(); + + StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" ); + while ( avceTokenizer.hasMoreTokens() ) + { + avceTokenList.addLast( avceTokenizer.nextToken() ); + } + + String lastAvceToken = (String) avceTokenList.removeLast(); + + // TODO: share with other discoverer, use artifact handlers instead + if ( lastAvceToken.endsWith( ".tar.gz" ) ) + { + type = "distribution-tgz"; + + lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() ); + + avceTokenList.addLast( lastAvceToken ); + } + else if ( lastAvceToken.endsWith( "sources.jar" ) ) + { + type = "java-source"; + + lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() ); + + avceTokenList.addLast( lastAvceToken ); + } + else if ( lastAvceToken.endsWith( "javadoc.jar" ) ) + { + type = "javadoc.jar"; + + lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() ); + + avceTokenList.addLast( lastAvceToken ); + } + else if ( lastAvceToken.endsWith( ".zip" ) ) + { + type = "distribution-zip"; + + lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() ); + + avceTokenList.addLast( lastAvceToken ); + } + else + { + int extPos = lastAvceToken.lastIndexOf( '.' ); + + if ( extPos > 0 ) + { + String ext = lastAvceToken.substring( extPos + 1 ); + if ( type.equals( ext ) || "plugin".equals( type ) ) + { + lastAvceToken = lastAvceToken.substring( 0, extPos ); + + avceTokenList.addLast( lastAvceToken ); + } + else + { + throw new BuilderException( "Path type does not match the extension" ); + } + } + else + { + throw new BuilderException( "Path filename does not have an extension" ); + } + } + + // let's discover the version, and whatever's leftover will be either + // a classifier, or part of the artifactId, depending on position. + // Since version is at the end, we have to move in from the back. + Collections.reverse( avceTokenList ); + + // TODO: this is obscene - surely a better way? + String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" + + "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" + + "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" + + "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" + + "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" + + "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" + + "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)"; + + StringBuffer classifierBuffer = new StringBuffer(); + StringBuffer versionBuffer = new StringBuffer(); + + boolean firstVersionTokenEncountered = false; + boolean firstToken = true; + + int tokensIterated = 0; + for ( Iterator it = avceTokenList.iterator(); it.hasNext(); ) + { + String token = (String) it.next(); + + boolean tokenIsVersionPart = token.matches( validVersionParts ); + + StringBuffer bufferToUpdate; + + // NOTE: logic in code is reversed, since we're peeling off the back + // Any token after the last versionPart will be in the classifier. + // Any token UP TO first non-versionPart is part of the version. 
+ if ( !tokenIsVersionPart ) + { + if ( firstVersionTokenEncountered ) + { + //noinspection BreakStatement + break; + } + else + { + bufferToUpdate = classifierBuffer; + } + } + else + { + firstVersionTokenEncountered = true; + + bufferToUpdate = versionBuffer; + } + + if ( firstToken ) + { + firstToken = false; + } + else + { + bufferToUpdate.insert( 0, '-' ); + } + + bufferToUpdate.insert( 0, token ); + + tokensIterated++; + } + + // Now, restore the proper ordering so we can build the artifactId. + Collections.reverse( avceTokenList ); + + // if we didn't find a version, then punt. Use the last token + // as the version, and set the classifier empty. + if ( versionBuffer.length() < 1 ) + { + if ( avceTokenList.size() > 1 ) + { + int lastIdx = avceTokenList.size() - 1; + + versionBuffer.append( avceTokenList.get( lastIdx ) ); + avceTokenList.remove( lastIdx ); + } + + classifierBuffer.setLength( 0 ); + } + else + { + // if everything is kosher, then pop off all the classifier and + // version tokens, leaving the naked artifact id in the list. + avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) ); + } + + StringBuffer artifactIdBuffer = new StringBuffer(); + + firstToken = true; + for ( Iterator it = avceTokenList.iterator(); it.hasNext(); ) + { + String token = (String) it.next(); + + if ( firstToken ) + { + firstToken = false; + } + else + { + artifactIdBuffer.append( '-' ); + } + + artifactIdBuffer.append( token ); + } + + String artifactId = artifactIdBuffer.toString(); + + if ( artifactId.length() > 0 ) + { + int lastVersionCharIdx = versionBuffer.length() - 1; + if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' ) + { + versionBuffer.setLength( lastVersionCharIdx ); + } + + String version = versionBuffer.toString(); + + if ( version.length() > 0 ) + { + if ( classifierBuffer.length() > 0 ) + { + result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, + classifierBuffer.toString() ); + } + else + { + result = artifactFactory.createArtifact( groupId, artifactId, version, + Artifact.SCOPE_RUNTIME, type ); + } + } + else + { + throw new BuilderException( "Path filename version is empty" ); + } + } + else + { + throw new BuilderException( "Path filename artifactId is empty" ); + } + } + else + { + throw new BuilderException( "Path artifact type does not corresspond to an artifact type" ); + } + } + else + { + throw new BuilderException( "Path does not match a legacy repository path for an artifact" ); + } + + return result; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifact.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifact.java new file mode 100644 index 000000000..8e3c67838 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifact.java @@ -0,0 +1,76 @@ +package org.apache.maven.archiva.common.artifact.managed; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.Artifact; + +import java.util.HashMap; +import java.util.Map; + +/** + * ManagedArtifact + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ManagedArtifact +{ + private String repositoryId; + + private Artifact artifact; + + private String path; + + protected Map attached; + + public ManagedArtifact( String repoId, Artifact artifact, String path ) + { + super(); + this.repositoryId = repoId; + this.artifact = artifact; + this.path = path; + this.attached = new HashMap(); + } + + public Artifact getArtifact() + { + return artifact; + } + + public String getPath() + { + return path; + } + + public String getRepositoryId() + { + return repositoryId; + } + + public Map getAttached() + { + return attached; + } + + public void setAttached( Map attached ) + { + this.attached = attached; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifactTypes.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifactTypes.java new file mode 100644 index 000000000..b653d160a --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedArtifactTypes.java @@ -0,0 +1,81 @@ +package org.apache.maven.archiva.common.artifact.managed; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.commons.lang.StringUtils; + +import java.util.ArrayList; +import java.util.List; + +/** + * ManagedArtifactTypes - provides place to test an unknown artifact type. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ManagedArtifactTypes +{ + public static final int GENERIC = 0; + + public static final int JAVA = 1; + + public static final int EJB = 2; + + private static List javaArtifacts; + + private static List ejbArtifacts; + + static + { + javaArtifacts = new ArrayList(); + javaArtifacts.add( "jar" ); + javaArtifacts.add( "war" ); + javaArtifacts.add( "sar" ); + javaArtifacts.add( "rar" ); + javaArtifacts.add( "ear" ); + + ejbArtifacts = new ArrayList(); + ejbArtifacts.add( "ejb" ); + ejbArtifacts.add( "ejb-client" ); + } + + public static int whichType( String type ) + { + if ( StringUtils.isBlank( type ) ) + { + // TODO: is an empty type even possible? 
+ return GENERIC; + } + + type = type.toLowerCase(); + + if ( ejbArtifacts.contains( type ) ) + { + return EJB; + } + + if ( javaArtifacts.contains( type ) ) + { + return JAVA; + } + + return GENERIC; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedEjbArtifact.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedEjbArtifact.java new file mode 100644 index 000000000..1759df2db --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedEjbArtifact.java @@ -0,0 +1,49 @@ +package org.apache.maven.archiva.common.artifact.managed; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.Artifact; + +/** + * ManagedEjbArtifact - adds the ability to reference the ejb-client jar too. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ManagedEjbArtifact + extends ManagedJavaArtifact +{ + public static final String CLIENT = "client"; + + public ManagedEjbArtifact( String repoId, Artifact artifact, String path ) + { + super( repoId, artifact, path ); + } + + public String getClientPath() + { + return (String) super.attached.get( CLIENT ); + } + + public void setClientPath( String clientPath ) + { + super.attached.put( CLIENT, clientPath ); + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedJavaArtifact.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedJavaArtifact.java new file mode 100644 index 000000000..203234b06 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/artifact/managed/ManagedJavaArtifact.java @@ -0,0 +1,62 @@ +package org.apache.maven.archiva.common.artifact.managed; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.Artifact; + +/** + * ManagedJavaArtifact - a ManagedArtifact with optional javadoc and source + * reference jars. 
+ * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ManagedJavaArtifact + extends ManagedArtifact +{ + public static final String JAVADOC = "javadoc"; + + public static final String SOURCES = "sources"; + + public ManagedJavaArtifact( String repoId, Artifact artifact, String path ) + { + super( repoId, artifact, path ); + } + + public String getJavadocPath() + { + return (String) super.attached.get( JAVADOC ); + } + + public void setJavadocPath( String javadocPath ) + { + super.attached.put( JAVADOC, javadocPath ); + } + + public String getSourcesPath() + { + return (String) super.attached.get( SOURCES ); + } + + public void setSourcesPath( String sourcesPath ) + { + super.attached.put( SOURCES, sourcesPath ); + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/AbstractConsumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/AbstractConsumer.java new file mode 100644 index 000000000..603434630 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/AbstractConsumer.java @@ -0,0 +1,66 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.factory.ArtifactFactory; +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.codehaus.plexus.logging.AbstractLogEnabled; + +import java.util.Collections; +import java.util.List; + +/** + * AbstractDiscovererConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class AbstractConsumer + extends AbstractLogEnabled + implements Consumer +{ + /** + * @plexus.requirement + */ + protected ArtifactFactory artifactFactory; + + protected ArtifactRepository repository; + + protected AbstractConsumer() + { + /* do nothing */ + } + + public List getExcludePatterns() + { + return Collections.EMPTY_LIST; + } + + public boolean init( ArtifactRepository repository ) + { + this.repository = repository; + return isEnabled(); + } + + protected boolean isEnabled() + { + return true; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/Consumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/Consumer.java new file mode 100644 index 000000000..fad6f2fa8 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/Consumer.java @@ -0,0 +1,90 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.artifact.repository.ArtifactRepository; + +import java.util.List; + +/** + * DiscovererConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public interface Consumer +{ + public static final String ROLE = Consumer.class.getName(); + + /** + * This is the human readable name for the discoverer. + * + * @return the human readable discoverer name. + */ + public String getName(); + + /** + * This is used to initialize any internals in the consumer before it is used. + * + * This method is called by the internals of archiva and is not meant to be used by other developers. + * This method is called once per repository. + * + * @param repository the repository to initialize the consumer against. + * @return true if the repository is valid for this consumer. false will result in consumer being disabled + * for the provided repository. + */ + public boolean init( ArtifactRepository repository ); + + /** + * Get the List of excluded file patterns for this consumer. + * + * @return the list of excluded file patterns for this consumer. + */ + public List getExcludePatterns(); + + /** + * Get the List of included file patterns for this consumer. + * + * @return the list of included file patterns for this consumer. + */ + public List getIncludePatterns(); + + /** + * Called by archiva framework to indicate that there is a file suitable for consuming, + * This method will only be called if the {@link #init(ArtifactRepository)} and {@link #getExcludePatterns()} + * and {@link #getIncludePatterns()} all pass for this consumer. + * + * @param file the file to process. + * @throws ConsumerException if there was a problem processing this file. + */ + public void processFile( BaseFile file ) throws ConsumerException; + + /** + * Called by archiva framework to indicate that there has been a problem detected + * on a specific file. + * + * NOTE: It is very possible for 1 file to have more than 1 problem associated with it. + * + * @param file the file to process. + * @param message the message describing the problem. + */ + public void processFileProblem( BaseFile file, String message ); +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerException.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerException.java new file mode 100644 index 000000000..0c4c6451a --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerException.java @@ -0,0 +1,52 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.ArchivaException; +import org.apache.maven.archiva.common.utils.BaseFile; + +/** + * ConsumerException - details about the failure of a consumer. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ConsumerException + extends ArchivaException +{ + private BaseFile file; + + public ConsumerException( BaseFile file, String message, Throwable cause ) + { + super( message, cause ); + this.file = file; + } + + public ConsumerException( BaseFile file, String message ) + { + super( message ); + this.file = file; + } + + public BaseFile getFile() + { + return file; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerFactory.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerFactory.java new file mode 100644 index 000000000..2b2343c0f --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/ConsumerFactory.java @@ -0,0 +1,70 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.codehaus.plexus.PlexusConstants; +import org.codehaus.plexus.PlexusContainer; +import org.codehaus.plexus.context.Context; +import org.codehaus.plexus.context.ContextException; +import org.codehaus.plexus.logging.AbstractLogEnabled; +import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable; + +/** + * DiscovererConsumerFactory - factory for consumers. 
+ * + * @author Joakim Erdfelt + * @version $Id$ + * @plexus.component role="org.apache.maven.archiva.common.consumers.ConsumerFactory" + */ +public class ConsumerFactory + extends AbstractLogEnabled + implements Contextualizable +{ + public static final String ROLE = ConsumerFactory.class.getName(); + + private PlexusContainer container; + + public Consumer createConsumer( String name ) + throws ConsumerException + { + getLogger().info( "Attempting to create consumer [" + name + "]" ); + + Consumer consumer; + try + { + consumer = (Consumer) container.lookup( Consumer.ROLE, container.getLookupRealm() ); + } + catch ( Throwable t ) + { + String emsg = "Unable to create consumer [" + name + "]: " + t.getMessage(); + getLogger().warn( t.getMessage(), t ); + throw new ConsumerException( null, emsg, t ); + } + + getLogger().info( "Created consumer [" + name + "|" + consumer.getName() + "]" ); + return consumer; + } + + public void contextualize( Context context ) + throws ContextException + { + container = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY ); + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumer.java new file mode 100644 index 000000000..c9e5437b7 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumer.java @@ -0,0 +1,130 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.maven.archiva.common.artifact.builder.BuilderException; +import org.apache.maven.archiva.common.artifact.builder.DefaultLayoutArtifactBuilder; +import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder; +import org.apache.maven.archiva.common.artifact.builder.LegacyLayoutArtifactBuilder; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; +import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout; +import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * DefaultArtifactConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class GenericArtifactConsumer + extends AbstractConsumer + implements Consumer +{ + public abstract void processArtifact( Artifact artifact, BaseFile file ); + + private Map artifactBuilders = new HashMap(); + + private static final List includePatterns; + + static + { + includePatterns = new ArrayList(); + includePatterns.add( "**/*.pom" ); + includePatterns.add( "**/*.jar" ); + includePatterns.add( "**/*.war" ); + includePatterns.add( "**/*.ear" ); + includePatterns.add( "**/*.sar" ); + includePatterns.add( "**/*.zip" ); + includePatterns.add( "**/*.gz" ); + includePatterns.add( "**/*.bz2" ); + } + + private String layoutId = "default"; + + public boolean init( ArtifactRepository repository ) + { + this.artifactBuilders.clear(); + this.artifactBuilders.put( "default", new DefaultLayoutArtifactBuilder( artifactFactory ) ); + this.artifactBuilders.put( "legacy", new LegacyLayoutArtifactBuilder( artifactFactory ) ); + + if ( repository.getLayout() instanceof LegacyRepositoryLayout ) + { + this.layoutId = "legacy"; + } + + return super.init( repository ); + } + + public List getIncludePatterns() + { + return includePatterns; + } + + public boolean isEnabled() + { + ArtifactRepositoryLayout layout = repository.getLayout(); + return ( layout instanceof DefaultRepositoryLayout ) || ( layout instanceof LegacyRepositoryLayout ); + } + + public void processFile( BaseFile file ) + throws ConsumerException + { + if ( file.length() <= 0 ) + { + processFileProblem( file, "File is empty." ); + } + + if ( !file.canRead() ) + { + processFileProblem( file, "Not allowed to read file due to permission settings on file." 
); + } + + try + { + Artifact artifact = buildArtifact( file ); + + processArtifact( artifact, file ); + } + catch ( BuilderException e ) + { + throw new ConsumerException( file, e.getMessage(), e ); + } + } + + private Artifact buildArtifact( BaseFile file ) + throws BuilderException + { + LayoutArtifactBuilder builder = (LayoutArtifactBuilder) artifactBuilders.get( layoutId ); + + Artifact artifact = builder.build( file.getRelativePath() ); + artifact.setRepository( repository ); + artifact.setFile( file ); + + return artifact; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericModelConsumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericModelConsumer.java new file mode 100644 index 000000000..efcd7af64 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericModelConsumer.java @@ -0,0 +1,98 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.model.Model; +import org.apache.maven.model.io.xpp3.MavenXpp3Reader; +import org.codehaus.plexus.util.IOUtil; +import org.codehaus.plexus.util.xml.pull.XmlPullParserException; + +import java.io.FileReader; +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayList; +import java.util.List; + +/** + * GenericModelConsumer - consumer for pom files. 
+ * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class GenericModelConsumer + extends AbstractConsumer + implements Consumer +{ + public abstract void processModel( Model model, BaseFile file ); + + private static final List includePatterns; + + static + { + includePatterns = new ArrayList(); + includePatterns.add( "**/*.pom" ); + } + + public List getIncludePatterns() + { + return includePatterns; + } + + public boolean isEnabled() + { + return true; + } + + public void processFile( BaseFile file ) + throws ConsumerException + { + Model model = buildModel( file ); + processModel( model, file ); + } + + private Model buildModel( BaseFile file ) + throws ConsumerException + { + Model model; + Reader reader = null; + try + { + reader = new FileReader( file ); + MavenXpp3Reader modelReader = new MavenXpp3Reader(); + + model = modelReader.read( reader ); + } + catch ( XmlPullParserException e ) + { + throw new ConsumerException( file, "Error parsing metadata file: " + e.getMessage(), e ); + } + catch ( IOException e ) + { + throw new ConsumerException( file, "Error reading metadata file: " + e.getMessage(), e ); + } + finally + { + IOUtil.close( reader ); + } + + return model; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumer.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumer.java new file mode 100644 index 000000000..1f4433c49 --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumer.java @@ -0,0 +1,231 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.commons.lang.StringUtils; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; +import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout; +import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata; +import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata; +import org.apache.maven.artifact.repository.metadata.Metadata; +import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; +import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata; +import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader; +import org.codehaus.plexus.util.IOUtil; +import org.codehaus.plexus.util.xml.pull.XmlPullParserException; + +import java.io.FileReader; +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.StringTokenizer; + +/** + * GenericRepositoryMetadataConsumer - Consume any maven-metadata.xml files as {@link RepositoryMetadata} objects. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class GenericRepositoryMetadataConsumer + extends AbstractConsumer + implements Consumer +{ + public abstract void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file ); + + private static final List includePatterns; + + static + { + includePatterns = new ArrayList(); + includePatterns.add( "**/maven-metadata.xml" ); + } + + public List getIncludePatterns() + { + return includePatterns; + } + + public boolean isEnabled() + { + // the RepositoryMetadata objects only exist in 'default' layout repositories. + ArtifactRepositoryLayout layout = repository.getLayout(); + return ( layout instanceof DefaultRepositoryLayout ); + } + + public void processFile( BaseFile file ) + throws ConsumerException + { + if ( file.length() <= 0 ) + { + throw new ConsumerException( file, "File is empty." ); + } + + if ( !file.canRead() ) + { + throw new ConsumerException( file, "Not allowed to read file due to permission settings on file." ); + } + + RepositoryMetadata metadata = buildMetadata( file ); + processRepositoryMetadata( metadata, file ); + } + + private RepositoryMetadata buildMetadata( BaseFile metadataFile ) + throws ConsumerException + { + Metadata m; + Reader reader = null; + try + { + reader = new FileReader( metadataFile ); + MetadataXpp3Reader metadataReader = new MetadataXpp3Reader(); + + m = metadataReader.read( reader ); + } + catch ( XmlPullParserException e ) + { + throw new ConsumerException( metadataFile, "Error parsing metadata file: " + e.getMessage(), e ); + } + catch ( IOException e ) + { + throw new ConsumerException( metadataFile, "Error reading metadata file: " + e.getMessage(), e ); + } + finally + { + IOUtil.close( reader ); + } + + RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataFile ); + + if ( repositoryMetadata == null ) + { + throw new ConsumerException( metadataFile, "Unable to build a repository metadata from path." ); + } + + return repositoryMetadata; + } + + /** + * Builds a RepositoryMetadata object from a Metadata object and its path. 
+ * + * @param m Metadata + * @param metadataFile file information + * @return RepositoryMetadata if the parameters represent one; null if not + * @throws ConsumerException + */ + private RepositoryMetadata buildMetadata( Metadata m, BaseFile metadataFile ) + throws ConsumerException + { + if ( artifactFactory == null ) + { + throw new IllegalStateException( "Unable to build metadata with a null artifactFactory." ); + } + + String metaGroupId = m.getGroupId(); + String metaArtifactId = m.getArtifactId(); + String metaVersion = m.getVersion(); + + // check if the groupId, artifactId and version is in the + // metadataPath + // parse the path, in reverse order + List pathParts = new ArrayList(); + StringTokenizer st = new StringTokenizer( metadataFile.getRelativePath(), "/\\" ); + while ( st.hasMoreTokens() ) + { + pathParts.add( st.nextToken() ); + } + + Collections.reverse( pathParts ); + // remove the metadata file + pathParts.remove( 0 ); + Iterator it = pathParts.iterator(); + String tmpDir = (String) it.next(); + + Artifact artifact = null; + if ( StringUtils.isNotEmpty( metaVersion ) ) + { + artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion ); + } + + // snapshotMetadata + RepositoryMetadata metadata = null; + if ( tmpDir != null && tmpDir.equals( metaVersion ) ) + { + if ( artifact != null ) + { + metadata = new SnapshotArtifactRepositoryMetadata( artifact ); + } + } + else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) ) + { + // artifactMetadata + if ( artifact != null ) + { + metadata = new ArtifactRepositoryMetadata( artifact ); + } + else + { + artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" ); + metadata = new ArtifactRepositoryMetadata( artifact ); + } + } + else + { + String groupDir = ""; + int ctr = 0; + for ( it = pathParts.iterator(); it.hasNext(); ) + { + String path = (String) it.next(); + if ( ctr == 0 ) + { + groupDir = path; + } + else + { + groupDir = path + "." + groupDir; + } + ctr++; + } + + // groupMetadata + if ( metaGroupId != null && metaGroupId.equals( groupDir ) ) + { + metadata = new GroupRepositoryMetadata( metaGroupId ); + } + else + { + /* If we reached this point, we have some bad metadata. + * We have a metadata file, with values for groupId / artifactId / version. + * But the information it is providing does not exist relative to the file location. + * + * See ${basedir}/src/test/repository/javax/maven-metadata.xml for example + */ + throw new ConsumerException( metadataFile, + "Contents of metadata are not appropriate for its location on disk." ); + } + } + + return metadata; + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/BaseFile.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/BaseFile.java new file mode 100644 index 000000000..a4b83db7e --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/BaseFile.java @@ -0,0 +1,105 @@ +package org.apache.maven.archiva.common.utils; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.io.File; +import java.net.URI; + +/** + * BaseFile - convenient File object that tracks the Base Directory and can provide relative path values + * for the file object based on that Base Directory value. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class BaseFile + extends File +{ + private File baseDir; + + public BaseFile( File pathFile ) + { + this( pathFile.getAbsolutePath() ); + } + + public BaseFile( File repoDir, File pathFile ) + { + this( repoDir, PathUtil.getRelative( repoDir.getAbsolutePath(), pathFile ) ); + } + + public BaseFile( File parent, String child ) + { + super( parent, child ); + this.baseDir = parent; + } + + public BaseFile( String pathname ) + { + super( pathname ); + + // Calculate the top level directory. + + File parent = this; + while ( parent.getParentFile() != null ) + { + parent = parent.getParentFile(); + } + + this.baseDir = parent; + } + + public BaseFile( String repoDir, File pathFile ) + { + this( new File( repoDir ), pathFile ); + } + + public BaseFile( String parent, String child ) + { + super( parent, child ); + this.baseDir = new File( parent ); + } + + public BaseFile( URI uri ) + { + super( uri ); // only to satisfy java compiler. + throw new IllegalStateException( "The " + BaseFile.class.getName() + + " object does not support URI construction." ); + } + + public File getBaseDir() + { + return baseDir; + } + + public String getRelativePath() + { + return PathUtil.getRelative( this.baseDir.getAbsolutePath(), this ); + } + + public void setBaseDir( File baseDir ) + { + this.baseDir = baseDir; + } + + public void setBaseDir( String repoDir ) + { + setBaseDir( new File( repoDir ) ); + } +} diff --git a/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/PathUtil.java b/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/PathUtil.java new file mode 100644 index 000000000..25df4254a --- /dev/null +++ b/archiva-common/src/main/java/org/apache/maven/archiva/common/utils/PathUtil.java @@ -0,0 +1,56 @@ +package org.apache.maven.archiva.common.utils; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.io.File; + +/** + * PathUtil - simple utility methods for path manipulation. 
+ * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class PathUtil +{ + public static String getRelative( String basedir, File file ) + { + return getRelative( basedir, file.getAbsolutePath() ); + } + + public static String getRelative( String basedir, String child ) + { + if ( child.startsWith( basedir ) ) + { + // simple solution. + return child.substring( basedir.length() + 1 ); + } + + String absoluteBasedir = new File( basedir ).getAbsolutePath(); + if ( child.startsWith( absoluteBasedir ) ) + { + // resolved basedir solution. + return child.substring( absoluteBasedir.length() + 1 ); + } + + // File is not within basedir. + throw new IllegalStateException( "Unable to obtain relative path of file " + child + + ", it is not within basedir " + basedir + "." ); + } +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/AbstractArchivaCommonTestCase.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/AbstractArchivaCommonTestCase.java new file mode 100644 index 000000000..11b2d1c2c --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/AbstractArchivaCommonTestCase.java @@ -0,0 +1,191 @@ +package org.apache.maven.archiva.common; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.apache.maven.artifact.repository.ArtifactRepositoryFactory; +import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; +import org.codehaus.plexus.PlexusTestCase; +import org.codehaus.plexus.util.FileUtils; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +/** + * AbstractArchivaCommonTestCase + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class AbstractArchivaCommonTestCase + extends PlexusTestCase +{ + protected ArtifactRepository getLegacyRepository() + throws Exception + { + File repoBaseDir = new File( getBasedir(), "src/test/legacy-repository" ); + ArtifactRepository repository = createRepository( repoBaseDir, "legacy" ); + resetRepositoryState( repository ); + return repository; + } + + protected ArtifactRepository getDefaultRepository() + throws Exception + { + File repoBaseDir = new File( getBasedir(), "src/test/repository" ); + ArtifactRepository repository = createRepository( repoBaseDir, "default" ); + resetRepositoryState( repository ); + return repository; + } + + private void resetRepositoryState( ArtifactRepository repository ) + throws IOException + { + File repoBaseDir = new File( repository.getBasedir() ); + + List tmpfiles = FileUtils.getFiles( repoBaseDir, ".*", "" ); + for ( Iterator it = tmpfiles.iterator(); it.hasNext(); ) + { + File hit = (File) it.next(); + if ( hit.exists() ) + { + if ( hit.isFile() ) + { + hit.delete(); + } + + if ( hit.isDirectory() ) + { + FileUtils.deleteDirectory( hit ); + } + } + } + } + + protected ArtifactRepository createRepository( File basedir, String layout ) + throws Exception + { + ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE ); + + ArtifactRepositoryLayout repoLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, layout ); + + return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, repoLayout, null, null ); + } + + public List getLegacyLayoutArtifactPaths() + { + List files = new ArrayList(); + + files.add( "invalid/jars/1.0/invalid-1.0.jar" ); + files.add( "invalid/jars/invalid-1.0.rar" ); + files.add( "invalid/jars/invalid.jar" ); + files.add( "invalid/invalid-1.0.jar" ); + files.add( "javax.sql/jars/jdbc-2.0.jar" ); + files.add( "org.apache.maven/jars/some-ejb-1.0-client.jar" ); + files.add( "org.apache.maven/jars/testing-1.0.jar" ); + files.add( "org.apache.maven/jars/testing-1.0-sources.jar" ); + files.add( "org.apache.maven/jars/testing-UNKNOWN.jar" ); + files.add( "org.apache.maven/jars/testing-1.0.zip" ); + files.add( "org.apache.maven/jars/testing-1.0-20050611.112233-1.jar" ); + files.add( "org.apache.maven/jars/testing-1.0.tar.gz" ); + files.add( "org.apache.maven.update/jars/test-not-updated-1.0.jar" ); + files.add( "org.apache.maven.update/jars/test-updated-1.0.jar" ); + + return files; + } + + public List getDefaultLayoutArtifactPaths() + { + List files = new ArrayList(); + + files.add( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" ); + files.add( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" ); + files.add( "invalid/invalid/1.0/invalid-1.0b.jar" ); + files.add( "invalid/invalid/1.0/invalid-2.0.jar" ); + files.add( "invalid/invalid-1.0.jar" ); + files.add( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" ); + files.add( 
"org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar" ); + files.add( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar" ); + files.add( "org/apache/maven/A/1.0/A-1.0.war" ); + files.add( "org/apache/maven/A/1.0/A-1.0.pom" ); + files.add( "org/apache/maven/B/2.0/B-2.0.pom" ); + files.add( "org/apache/maven/B/1.0/B-1.0.pom" ); + files.add( "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar" ); + files.add( "org/apache/maven/C/1.0/C-1.0.war" ); + files.add( "org/apache/maven/C/1.0/C-1.0.pom" ); + files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom" ); + files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar" ); + files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom" ); + files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar" ); + files.add( "org/apache/maven/discovery/1.0/discovery-1.0.pom" ); + files.add( "org/apache/maven/testing/1.0/testing-1.0-test-sources.jar" ); + files.add( "org/apache/maven/testing/1.0/testing-1.0.jar" ); + files.add( "org/apache/maven/testing/1.0/testing-1.0-sources.jar" ); + files.add( "org/apache/maven/testing/1.0/testing-1.0.zip" ); + files.add( "org/apache/maven/testing/1.0/testing-1.0.tar.gz" ); + files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.pom" ); + files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.jar" ); + files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.pom" ); + files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.jar" ); + files.add( "org/apache/testgroup/discovery/1.0/discovery-1.0.pom" ); + files.add( "javax/sql/jdbc/2.0/jdbc-2.0.jar" ); + + return files; + } + + public List getDefaultLayoutMetadataPaths() + { + List files = new ArrayList(); + + files.add( "org/apache/maven/some-ejb/1.0/maven-metadata.xml" ); + files.add( "org/apache/maven/update/test-not-updated/maven-metadata.xml" ); + files.add( "org/apache/maven/update/test-updated/maven-metadata.xml" ); + files.add( "org/apache/maven/maven-metadata.xml" ); + files.add( "org/apache/testgroup/discovery/1.0/maven-metadata.xml" ); + files.add( "org/apache/testgroup/discovery/maven-metadata.xml" ); + files.add( "javax/sql/jdbc/2.0/maven-metadata-repository.xml" ); + files.add( "javax/sql/jdbc/maven-metadata-repository.xml" ); + files.add( "javax/sql/maven-metadata-repository.xml" ); + files.add( "javax/maven-metadata.xml" ); + + return files; + } + + public List getDefaultLayoutModelPaths() + { + List files = new ArrayList(); + + files.add( "org/apache/maven/A/1.0/A-1.0.pom" ); + files.add( "org/apache/maven/B/2.0/B-2.0.pom" ); + files.add( "org/apache/maven/B/1.0/B-1.0.pom" ); + files.add( "org/apache/maven/C/1.0/C-1.0.pom" ); + files.add( "org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom" ); + files.add( "org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom" ); + files.add( "org/apache/maven/discovery/1.0/discovery-1.0.pom" ); + files.add( "org/apache/maven/samplejar/2.0/samplejar-2.0.pom" ); + files.add( "org/apache/maven/samplejar/1.0/samplejar-1.0.pom" ); + files.add( "org/apache/testgroup/discovery/1.0/discovery-1.0.pom" ); + + return files; + } +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/AllTests.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/AllTests.java new file mode 100644 index 000000000..878f7fe1a --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/AllTests.java @@ -0,0 +1,45 @@ +package 
org.apache.maven.archiva.common; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common" ); + //$JUnit-BEGIN$ + suite.addTest( org.apache.maven.archiva.common.artifact.builder.AllTests.suite() ); + suite.addTest( org.apache.maven.archiva.common.consumers.AllTests.suite() ); + suite.addTest( org.apache.maven.archiva.common.utils.AllTests.suite() ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilderTestCase.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilderTestCase.java new file mode 100644 index 000000000..9d9b55df0 --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AbstractLayoutArtifactBuilderTestCase.java @@ -0,0 +1,51 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.commons.lang.StringUtils; +import org.apache.maven.artifact.Artifact; +import org.codehaus.plexus.PlexusTestCase; + +/** + * AbstractLayoutArtifactBuilderTestCase + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class AbstractLayoutArtifactBuilderTestCase +extends PlexusTestCase +{ + + protected void assertArtifact( String groupId, String artifactId, String version, String type, String classifier, Artifact artifact ) + { + assertNotNull( "Artifact cannot be null.", artifact ); + + assertEquals( "Artifact groupId", groupId, artifact.getGroupId() ); + assertEquals( "Artifact artifactId", artifactId, artifact.getArtifactId() ); + assertEquals( "Artifact version", version, artifact.getVersion() ); + assertEquals( "Artifact type", type, artifact.getType() ); + + if ( StringUtils.isNotBlank( classifier ) ) + { + assertEquals( "Artifact classifier", classifier, artifact.getClassifier() ); + } + } + +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AllTests.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AllTests.java new file mode 100644 index 000000000..45511c374 --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/AllTests.java @@ -0,0 +1,44 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.discoverer.builders" ); + //$JUnit-BEGIN$ + suite.addTestSuite( LegacyLayoutArtifactBuilderTest.class ); + suite.addTestSuite( DefaultLayoutArtifactBuilderTest.class ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilderTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilderTest.java new file mode 100644 index 000000000..954a5bb68 --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/DefaultLayoutArtifactBuilderTest.java @@ -0,0 +1,206 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * DefaultLayoutArtifactBuilderTest + * + * @author Brett Porter + * @author Joakim Erdfelt + * @version $Id$ + */ +public class DefaultLayoutArtifactBuilderTest + extends AbstractLayoutArtifactBuilderTestCase +{ + LayoutArtifactBuilder builder; + + protected void setUp() + throws Exception + { + super.setUp(); + + builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "default" ); + assertNotNull( builder ); + } + + protected void tearDown() + throws Exception + { + if ( builder != null ) + { + release( builder ); + } + super.tearDown(); + } + + public void testPathDistributionArtifacts() + throws BuilderException + { + assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, builder + .build( "org/apache/maven/testing/1.0/testing-1.0.tar.gz" ) ); + + assertArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, builder + .build( "org/apache/maven/testing/1.0/testing-1.0.zip" ) ); + } + + public void testPathNormal() + throws BuilderException + { + assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder + .build( "/org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) ); + + assertArtifact( "org.apache.maven.wagon", "wagon", "1.0", "jar", null, builder + .build( "org/apache/maven/wagon/wagon/1.0/wagon-1.0.jar" ) ); + + assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, builder.build( "javax/sql/jdbc/2.0/jdbc-2.0.jar" ) ); + + } + + public void testPathSnapshots() + throws BuilderException + { + assertArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT", "jar", null, builder + .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar" ) ); + + assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, builder + .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar" ) ); + } + + public void testPathSnapshotWithClassifier() + throws BuilderException + { + assertArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", builder + .build( "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar" ) ); + } + + public void testPathWithClassifier() + throws BuilderException + { + assertArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", builder + .build( "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar" ) ); + } + + public void testPathWithJavaSourceInclusion() + throws BuilderException + { + assertArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", builder + .build( "org/apache/maven/testing/1.0/testing-1.0-sources.jar" ) ); + } + + public void testProblemMissingType() + { + try + { + builder.build( "invalid/invalid/1/invalid-1" ); + fail( "Should have detected missing type." 
); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Path filename does not have an extension.", e.getMessage() ); + } + } + + public void testProblemNonSnapshotInSnapshotDir() + { + try + { + builder.build( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" ); + fail( "Non Snapshot artifact inside of an Snapshot dir is invalid." ); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", e.getMessage() ); + } + } + + public void testProblemPathTooShort() + { + try + { + builder.build( "invalid/invalid-1.0.jar" ); + fail( "Should have detected that path is too short." ); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Path is too short to build an artifact from.", e.getMessage() ); + } + } + + public void testProblemTimestampSnapshotNotInSnapshotDir() + { + try + { + builder.build( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" ); + fail( "Timestamped Snapshot artifact not inside of an Snapshot dir is invalid." ); + } + catch ( BuilderException e ) + { + /* expected path */ + // TODO: Is this really the right thing to do for this kind of artifact?? + assertEquals( "Built snapshot artifact base version does not match path version: 1.0-SNAPSHOT; " + + "should have been version: 1.0-20050611.123456-1", e.getMessage() ); + } + } + + public void testProblemVersionPathMismatch() + { + try + { + builder.build( "invalid/invalid/1.0/invalid-2.0.jar" ); + fail( "Should have detected version mismatch between path and artifact." ); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Built artifact version does not match path version", e.getMessage() ); + } + } + + public void testProblemVersionPathMismatchAlt() + { + try + { + builder.build( "invalid/invalid/1.0/invalid-1.0b.jar" ); + fail( "Should have version mismatch between directory and artifact." ); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Path version does not corresspond to an artifact version", e.getMessage() ); + } + } + + public void testProblemWrongArtifactId() + { + try + { + builder.build( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" ); + fail( "Should have detected wrong artifact Id." ); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Path filename does not correspond to an artifact.", e.getMessage() ); + } + } +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilderTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilderTest.java new file mode 100644 index 000000000..2fdae4a14 --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/artifact/builder/LegacyLayoutArtifactBuilderTest.java @@ -0,0 +1,160 @@ +package org.apache.maven.archiva.common.artifact.builder; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.Artifact; +import org.codehaus.plexus.component.repository.exception.ComponentLookupException; + +/** + * LegacyLayoutArtifactBuilderTest + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class LegacyLayoutArtifactBuilderTest + extends AbstractLayoutArtifactBuilderTestCase +{ + LayoutArtifactBuilder builder; + + protected void setUp() + throws Exception + { + super.setUp(); + + builder = (LayoutArtifactBuilder) lookup( LayoutArtifactBuilder.class.getName(), "legacy" ); + assertNotNull( builder ); + } + + protected void tearDown() + throws Exception + { + if ( builder != null ) + { + release( builder ); + } + super.tearDown(); + } + + public void testPathNormal() + throws BuilderException + { + Artifact artifact = builder.build( "javax.sql/jars/jdbc-2.0.jar" ); + + assertArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifact ); + } + + public void testPathFinal() + throws BuilderException + { + Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar" ); + + assertArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606", "jar", null, artifact ); + } + + public void testPathSnapshot() + throws BuilderException + { + Artifact artifact = builder.build( "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar" ); + + assertArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT", "jar", null, artifact ); + } + + public void testPathJavadoc() + throws BuilderException + { + Artifact artifact = builder.build( "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar" ); + + assertArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc", artifact ); + } + + public void testPathSources() + throws BuilderException + { + Artifact artifact = builder.build( "javax.sql/java-sources/jdbc-2.0-sources.jar" ); + + assertArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources", artifact ); + } + + public void testPathPlugin() + throws BuilderException + { + Artifact artifact = builder.build( "maven/plugins/maven-test-plugin-1.8.jar" ); + + assertArtifact( "maven", "maven-test-plugin", "1.8", "plugin", null, artifact ); + } + + public void testProblemNoType() + { + try + { + builder.build( "invalid/invalid/1/invalid-1" ); + + fail( "Should have detected no type." ); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Path does not match a legacy repository path for an artifact", e.getMessage() ); + } + } + + public void testProblemWrongArtifactPackaging() + throws ComponentLookupException + { + try + { + builder.build( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" ); + + fail( "Should have detected wrong package extension." 
); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Path type does not match the extension", e.getMessage() ); + } + } + + public void testProblemNoArtifactId() + { + try + { + builder.build( "groupId/jars/-1.0.jar" ); + + fail( "Should have detected artifactId is missing" ); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Path filename artifactId is empty", e.getMessage() ); + } + + try + { + builder.build( "groupId/jars/1.0.jar" ); + + fail( "Should have detected artifactId is missing" ); + } + catch ( BuilderException e ) + { + /* expected path */ + assertEquals( "Path filename artifactId is empty", e.getMessage() ); + } + } +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AbstractGenericConsumerTestCase.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AbstractGenericConsumerTestCase.java new file mode 100644 index 000000000..08abd6432 --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AbstractGenericConsumerTestCase.java @@ -0,0 +1,52 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.AbstractArchivaCommonTestCase; + +/** + * AbstractGenericConsumerTestCase + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class AbstractGenericConsumerTestCase + extends AbstractArchivaCommonTestCase +{ + protected ConsumerFactory consumerFactory; + + protected void setUp() + throws Exception + { + super.setUp(); + + consumerFactory = (ConsumerFactory) lookup( ConsumerFactory.ROLE ); + } + + protected void tearDown() + throws Exception + { + if ( consumerFactory != null ) + { + release( consumerFactory ); + } + super.tearDown(); + } +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AllTests.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AllTests.java new file mode 100644 index 000000000..b04c4c37c --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/AllTests.java @@ -0,0 +1,43 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common.consumers" ); + //$JUnit-BEGIN$ + suite.addTestSuite( GenericArtifactConsumerTest.class ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/FileProblemsTracker.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/FileProblemsTracker.java new file mode 100644 index 000000000..2773c99c0 --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/FileProblemsTracker.java @@ -0,0 +1,112 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.utils.BaseFile; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * FileProblemsTracker + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class FileProblemsTracker +{ + private Map problemMap = new HashMap(); + + public void addProblem( BaseFile file, String message ) + { + String path = file.getRelativePath(); + addProblem( path, message ); + } + + private void addProblem( String path, String message ) + { + List problems = getProblems( path ); + problems.add( message ); + problemMap.put( path, problems ); + } + + public void addProblem( ConsumerException e ) + { + if ( e.getFile() != null ) + { + this.addProblem( e.getFile(), e.getMessage() ); + } + else + { + this.addProblem( "|fatal|", e.getMessage() ); + } + } + + public boolean hasProblems( String path ) + { + if ( !problemMap.containsKey( path ) ) + { + // No tracking of path at all. + return false; + } + + List problems = (List) problemMap.get( path ); + if ( problems == null ) + { + // found path, but no list. 
+ return false; + } + + return !problems.isEmpty(); + } + + public Set getPaths() + { + return problemMap.keySet(); + } + + public List getProblems( String path ) + { + List problems = (List) problemMap.get( path ); + if ( problems == null ) + { + problems = new ArrayList(); + } + + return problems; + } + + public int getProblemCount() + { + int count = 0; + for ( Iterator it = problemMap.values().iterator(); it.hasNext(); ) + { + List problems = (List) it.next(); + count += problems.size(); + } + + return count; + } + +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.java new file mode 100644 index 000000000..1ff2d1ebb --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.java @@ -0,0 +1,220 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.commons.lang.StringUtils; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.repository.ArtifactRepository; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +/** + * GenericArtifactConsumerTest + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class GenericArtifactConsumerTest + extends AbstractGenericConsumerTestCase +{ + private MockArtifactConsumer getMockArtifactConsumer() + throws Exception + { + return (MockArtifactConsumer) consumerFactory.createConsumer( "mock-artifact" ); + } + + public void testScanLegacy() + throws Exception + { + ArtifactRepository repository = getLegacyRepository(); + List consumers = new ArrayList(); + + MockArtifactConsumer mockConsumer = getMockArtifactConsumer(); + mockConsumer.init( repository ); + + consumers.add( mockConsumer ); + + List files = getLegacyLayoutArtifactPaths(); + for ( Iterator it = files.iterator(); it.hasNext(); ) + { + String path = (String) it.next(); + try + { + mockConsumer.processFile( new BaseFile( repository.getBasedir(), path ) ); + } + catch ( ConsumerException e ) + { + mockConsumer.getProblemsTracker().addProblem( e ); + } + } + + assertNotNull( consumers ); + + FileProblemsTracker tracker = mockConsumer.getProblemsTracker(); + + assertTracker( tracker, 16 ); + + assertHasFailureMessage( "Path does not match a legacy repository path for an artifact", + "invalid/invalid-1.0.jar", tracker ); + assertHasFailureMessage( "Path filename version is empty", "invalid/jars/invalid.jar", tracker ); + assertHasFailureMessage( "Path does not match a legacy repository path for an artifact", + "invalid/jars/1.0/invalid-1.0.jar", tracker ); + + assertEquals( 10, mockConsumer.getArtifactMap().size() ); + } + + public void testScanDefault() + throws Exception + { + ArtifactRepository repository = getDefaultRepository(); + List consumers = new ArrayList(); + + MockArtifactConsumer mockConsumer = getMockArtifactConsumer(); + mockConsumer.init( repository ); + + consumers.add( mockConsumer ); + + List files = getDefaultLayoutArtifactPaths(); + for ( Iterator it = files.iterator(); it.hasNext(); ) + { + String path = (String) it.next(); + try + { + mockConsumer.processFile( new BaseFile( repository.getBasedir(), path ) ); + } + catch ( ConsumerException e ) + { + mockConsumer.getProblemsTracker().addProblem( e ); + } + } + + // Test gathered information from Mock consumer. 
+ + assertNotNull( consumers ); + + FileProblemsTracker tracker = mockConsumer.getProblemsTracker(); + + assertTracker( tracker, 21 ); + + assertHasFailureMessage( "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", + "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar", tracker ); + assertHasFailureMessage( "Path is too short to build an artifact from.", "invalid/invalid-1.0.jar", tracker ); + assertHasFailureMessage( "Built artifact version does not match path version", + "invalid/invalid/1.0/invalid-2.0.jar", tracker ); + + assertEquals( 25, mockConsumer.getArtifactMap().size() ); + + // Test for known include artifacts + + Collection artifacts = mockConsumer.getArtifactMap().values(); + assertHasArtifact( "org.apache.maven", "testing", "1.0", "jar", null, artifacts ); + assertHasArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client", artifacts ); + assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources", artifacts ); + assertHasArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources", artifacts ); + assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip", null, artifacts ); + assertHasArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz", null, artifacts ); + assertHasArtifact( "javax.sql", "jdbc", "2.0", "jar", null, artifacts ); + assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", null, artifacts ); + assertHasArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc", artifacts ); + + // Test for known excluded files and dirs to validate exclusions. + + Iterator it = mockConsumer.getArtifactMap().values().iterator(); + while ( it.hasNext() ) + { + Artifact a = (Artifact) it.next(); + assertTrue( "Artifact " + a + " should have it's .getFile() set.", a.getFile() != null ); + assertTrue( "Artifact " + a + " should have it's .getRepository() set.", a.getRepository() != null ); + assertTrue( "Artifact " + a + " should have non-null repository url.", a.getRepository().getUrl() != null ); + assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 ); + assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 ); + } + } + + private void dumpProblems( FileProblemsTracker tracker ) + { + int problemNum = 0; + System.out.println( "-- ProblemTracker dump -------------------------" ); + for ( Iterator itPaths = tracker.getPaths().iterator(); itPaths.hasNext(); ) + { + String path = (String) itPaths.next(); + System.out.println( " [" + problemNum + "]: " + path ); + + int messageNum = 0; + for ( Iterator itProblems = tracker.getProblems( path ).iterator(); itProblems.hasNext(); ) + { + String message = (String) itProblems.next(); + System.out.println( " [" + messageNum + "]: " + message ); + messageNum++; + } + + problemNum++; + } + } + + private void assertTracker( FileProblemsTracker tracker, int expectedProblemCount ) + { + assertNotNull( "ProblemsTracker should not be null.", tracker ); + + int actualProblemCount = tracker.getProblemCount(); + if ( expectedProblemCount != actualProblemCount ) + { + dumpProblems( tracker ); + fail( "Problem count (across all paths) expected:<" + expectedProblemCount + ">, actual:<" + + actualProblemCount + ">" ); + } + } + + private void assertHasFailureMessage( String message, String path, FileProblemsTracker tracker ) + { + if ( !tracker.hasProblems( path ) ) + { + fail( "There are no messages for expected path [" + path + "]" ); + } + + assertTrue( "Unable to find 
message [" + message + "] in path [" + path + "]", tracker.getProblems( path ) + .contains( message ) ); + } + + private void assertHasArtifact( String groupId, String artifactId, String version, String type, String classifier, + Collection collection ) + { + for ( Iterator it = collection.iterator(); it.hasNext(); ) + { + Artifact artifact = (Artifact) it.next(); + if ( StringUtils.equals( groupId, artifact.getGroupId() ) + && StringUtils.equals( artifactId, artifact.getArtifactId() ) + && StringUtils.equals( version, artifact.getVersion() ) + && StringUtils.equals( type, artifact.getType() ) + && StringUtils.equals( classifier, artifact.getClassifier() ) ) + { + // Found it! + return; + } + } + + fail( "Was unable to find artifact " + groupId + ":" + artifactId + ":" + version + ":" + type + ":" + + classifier ); + } +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockArtifactConsumer.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockArtifactConsumer.java new file mode 100644 index 000000000..94fb5ca3e --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockArtifactConsumer.java @@ -0,0 +1,71 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.archiva.common.utils.PathUtil; +import org.apache.maven.artifact.Artifact; + +import java.util.HashMap; +import java.util.Map; + +/** + * MockArtifactConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers" + * role-hint="mock-artifact" + * instantiation-strategy="per-lookup" + */ +public class MockArtifactConsumer + extends GenericArtifactConsumer +{ + private Map artifactMap = new HashMap(); + + private FileProblemsTracker problemsTracker = new FileProblemsTracker(); + + public void processArtifact( Artifact artifact, BaseFile file ) + { + String relpath = PathUtil.getRelative( repository.getBasedir(), file ); + artifactMap.put( relpath, artifact ); + } + + public void processFileProblem( BaseFile file, String message ) + { + problemsTracker.addProblem( file, message ); + } + + public Map getArtifactMap() + { + return artifactMap; + } + + public String getName() + { + return "Mock Artifact Consumer (Testing Only)"; + } + + public FileProblemsTracker getProblemsTracker() + { + return problemsTracker; + } +} \ No newline at end of file diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockModelConsumer.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockModelConsumer.java new file mode 100644 index 000000000..969f10b52 --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockModelConsumer.java @@ -0,0 +1,70 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.model.Model; + +import java.util.HashMap; +import java.util.Map; + +/** + * MockModelConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers" + * role-hint="mock-model" + * instantiation-strategy="per-lookup" + */ +public class MockModelConsumer + extends GenericModelConsumer +{ + private Map modelMap = new HashMap(); + + private FileProblemsTracker problemsTracker = new FileProblemsTracker(); + + public void processModel( Model model, BaseFile file ) + { + modelMap.put( file.getRelativePath(), model ); + } + + public void processFileProblem( BaseFile file, String message ) + { + problemsTracker.addProblem( file, message ); + } + + public Map getModelMap() + { + return modelMap; + } + + public String getName() + { + return "Mock Model Consumer (Testing Only)"; + } + + public FileProblemsTracker getProblemsTracker() + { + return problemsTracker; + } + +} \ No newline at end of file diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockRepositoryMetadataConsumer.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockRepositoryMetadataConsumer.java new file mode 100644 index 000000000..d10331adf --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/consumers/MockRepositoryMetadataConsumer.java @@ -0,0 +1,69 @@ +package org.apache.maven.archiva.common.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; + +import java.util.HashMap; +import java.util.Map; + +/** + * MockRepositoryMetadataConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers" + * role-hint="mock-metadata" + * instantiation-strategy="per-lookup" + */ +public class MockRepositoryMetadataConsumer + extends GenericRepositoryMetadataConsumer +{ + private Map repositoryMetadataMap = new HashMap(); + + private FileProblemsTracker problemsTracker = new FileProblemsTracker(); + + public void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file ) + { + repositoryMetadataMap.put( file.getRelativePath(), metadata ); + } + + public void processFileProblem( BaseFile file, String message ) + { + problemsTracker.addProblem( file, message ); + } + + public Map getRepositoryMetadataMap() + { + return repositoryMetadataMap; + } + + public String getName() + { + return "Mock RepositoryMetadata Consumer (Testing Only)"; + } + + public FileProblemsTracker getProblemsTracker() + { + return problemsTracker; + } +} \ No newline at end of file diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/AllTests.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/AllTests.java new file mode 100644 index 000000000..db0ec4f38 --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/AllTests.java @@ -0,0 +1,44 @@ +package org.apache.maven.archiva.common.utils; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.common.utils" ); + //$JUnit-BEGIN$ + suite.addTestSuite( PathUtilTest.class ); + suite.addTestSuite( BaseFileTest.class ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/BaseFileTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/BaseFileTest.java new file mode 100644 index 000000000..53412badf --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/BaseFileTest.java @@ -0,0 +1,115 @@ +package org.apache.maven.archiva.common.utils; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.io.File; + +import junit.framework.TestCase; + +/** + * BaseFileTest + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class BaseFileTest + extends TestCase +{ + public void testFileString() + { + File repoDir = new File( "/home/user/foo/repository" ); + String pathFile = "path/to/resource.xml"; + BaseFile file = new BaseFile( repoDir, pathFile ); + + assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() ); + assertEquals( "path/to/resource.xml", file.getRelativePath() ); + assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() ); + } + + public void testFileFile() + { + File repoDir = new File( "/home/user/foo/repository" ); + File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" ); + BaseFile file = new BaseFile( repoDir, pathFile ); + + assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() ); + assertEquals( "path/to/resource.xml", file.getRelativePath() ); + assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() ); + } + + public void testStringFile() + { + String repoDir = "/home/user/foo/repository"; + File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" ); + BaseFile file = new BaseFile( repoDir, pathFile ); + + assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() ); + assertEquals( "path/to/resource.xml", file.getRelativePath() ); + assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() ); + } + + public void testFileThenSetBaseString() + { + String repoDir = "/home/user/foo/repository"; + File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" ); + BaseFile file = new BaseFile( pathFile ); + file.setBaseDir( repoDir ); + + assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() ); + assertEquals( "path/to/resource.xml", file.getRelativePath() ); + assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() ); + } + + public void testFileThenSetBaseFile() + { + File repoDir = new File( "/home/user/foo/repository" ); + File pathFile = new File( "/home/user/foo/repository/path/to/resource.xml" ); + BaseFile file = new BaseFile( pathFile ); + file.setBaseDir( repoDir ); + + assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() ); + assertEquals( "path/to/resource.xml", file.getRelativePath() ); + assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() ); + } + + public void testStringThenSetBaseString() + { + String repoDir = "/home/user/foo/repository"; + String pathFile = "/home/user/foo/repository/path/to/resource.xml"; + BaseFile file = new BaseFile( pathFile ); + file.setBaseDir( repoDir ); + + assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() ); + assertEquals( "path/to/resource.xml", file.getRelativePath() ); + assertEquals( 
new File( "/home/user/foo/repository" ), file.getBaseDir() ); + } + + public void testStringThenSetBaseFile() + { + File repoDir = new File( "/home/user/foo/repository" ); + String pathFile = "/home/user/foo/repository/path/to/resource.xml"; + BaseFile file = new BaseFile( pathFile ); + file.setBaseDir( repoDir ); + + assertEquals( "/home/user/foo/repository/path/to/resource.xml", file.getAbsolutePath() ); + assertEquals( "path/to/resource.xml", file.getRelativePath() ); + assertEquals( new File( "/home/user/foo/repository" ), file.getBaseDir() ); + } +} diff --git a/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/PathUtilTest.java b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/PathUtilTest.java new file mode 100644 index 000000000..58abbd2bf --- /dev/null +++ b/archiva-common/src/test/java/org/apache/maven/archiva/common/utils/PathUtilTest.java @@ -0,0 +1,40 @@ +package org.apache.maven.archiva.common.utils; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.maven.archiva.common.utils.PathUtil; + +import junit.framework.TestCase; + +/** + * PathUtilTest + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class PathUtilTest + extends TestCase +{ + public void testToRelative() + { + assertEquals( "path/to/resource.xml", PathUtil.getRelative( "/home/user/foo/repository", + "/home/user/foo/repository/path/to/resource.xml" ) ); + } +} diff --git a/archiva-common/src/test/legacy-repository/CVS/Root b/archiva-common/src/test/legacy-repository/CVS/Root new file mode 100644 index 000000000..2e65f24a6 --- /dev/null +++ b/archiva-common/src/test/legacy-repository/CVS/Root @@ -0,0 +1 @@ +not a real CVS root - for testing exclusions diff --git a/archiva-common/src/test/legacy-repository/KEYS b/archiva-common/src/test/legacy-repository/KEYS new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/invalid/foo/invalid-1.0.foo b/archiva-common/src/test/legacy-repository/invalid/foo/invalid-1.0.foo new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/invalid/invalid-1.0.jar b/archiva-common/src/test/legacy-repository/invalid/invalid-1.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/invalid/jars/1.0/invalid-1.0.jar b/archiva-common/src/test/legacy-repository/invalid/jars/1.0/invalid-1.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/invalid/jars/invalid-1.0.rar b/archiva-common/src/test/legacy-repository/invalid/jars/invalid-1.0.rar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/invalid/jars/invalid.jar b/archiva-common/src/test/legacy-repository/invalid/jars/invalid.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/invalid/jars/no-extension b/archiva-common/src/test/legacy-repository/invalid/jars/no-extension new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/javax.sql/jars/jdbc-2.0.jar b/archiva-common/src/test/legacy-repository/javax.sql/jars/jdbc-2.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-not-updated-1.0.jar b/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-not-updated-1.0.jar new file mode 100644 index 000000000..54d190b23 --- /dev/null +++ b/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-not-updated-1.0.jar @@ -0,0 +1 @@ +dummy content. sample file only. diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-updated-1.0.jar b/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-updated-1.0.jar new file mode 100644 index 000000000..54d190b23 --- /dev/null +++ b/archiva-common/src/test/legacy-repository/org.apache.maven.update/jars/test-updated-1.0.jar @@ -0,0 +1 @@ +dummy content. sample file only. 
diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/some-ejb-1.0-client.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/some-ejb-1.0-client.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-20050611.112233-1.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-20050611.112233-1.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-sources.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0-sources.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.tar.gz b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.tar.gz new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.zip b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-1.0.zip new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-UNKNOWN.jar b/archiva-common/src/test/legacy-repository/org.apache.maven/jars/testing-UNKNOWN.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/CVS/Root b/archiva-common/src/test/repository/CVS/Root new file mode 100644 index 000000000..2e65f24a6 --- /dev/null +++ b/archiva-common/src/test/repository/CVS/Root @@ -0,0 +1 @@ +not a real CVS root - for testing exclusions diff --git a/archiva-common/src/test/repository/KEYS b/archiva-common/src/test/repository/KEYS new file mode 100644 index 000000000..d3b34d5ad --- /dev/null +++ b/archiva-common/src/test/repository/KEYS @@ -0,0 +1 @@ +test KEYS file \ No newline at end of file diff --git a/archiva-common/src/test/repository/invalid/invalid-1.0.jar b/archiva-common/src/test/repository/invalid/invalid-1.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar b/archiva-common/src/test/repository/invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar b/archiva-common/src/test/repository/invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/invalid/invalid/1.0/invalid-1.0b.jar b/archiva-common/src/test/repository/invalid/invalid/1.0/invalid-1.0b.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/invalid/invalid/1.0/invalid-2.0.jar b/archiva-common/src/test/repository/invalid/invalid/1.0/invalid-2.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/invalid/invalid/1/invalid-1 b/archiva-common/src/test/repository/invalid/invalid/1/invalid-1 new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/javax/maven-metadata.xml 
b/archiva-common/src/test/repository/javax/maven-metadata.xml new file mode 100644 index 000000000..b3baf545d --- /dev/null +++ b/archiva-common/src/test/repository/javax/maven-metadata.xml @@ -0,0 +1,26 @@ + + + + + + javax.sql + jdbc + 2.0 + diff --git a/archiva-common/src/test/repository/javax/sql/jdbc/2.0/jdbc-2.0.jar b/archiva-common/src/test/repository/javax/sql/jdbc/2.0/jdbc-2.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/javax/sql/jdbc/2.0/maven-metadata-repository.xml b/archiva-common/src/test/repository/javax/sql/jdbc/2.0/maven-metadata-repository.xml new file mode 100644 index 000000000..caf5b6697 --- /dev/null +++ b/archiva-common/src/test/repository/javax/sql/jdbc/2.0/maven-metadata-repository.xml @@ -0,0 +1,25 @@ + + + + + javax.sql + jdbc + 2.0 + diff --git a/archiva-common/src/test/repository/javax/sql/jdbc/maven-metadata-repository.xml b/archiva-common/src/test/repository/javax/sql/jdbc/maven-metadata-repository.xml new file mode 100644 index 000000000..bb7570891 --- /dev/null +++ b/archiva-common/src/test/repository/javax/sql/jdbc/maven-metadata-repository.xml @@ -0,0 +1,30 @@ + + + + + javax.sql + jdbc + 2.0 + + + 2.0 + + + diff --git a/archiva-common/src/test/repository/javax/sql/maven-metadata-repository.xml b/archiva-common/src/test/repository/javax/sql/maven-metadata-repository.xml new file mode 100644 index 000000000..caf5b6697 --- /dev/null +++ b/archiva-common/src/test/repository/javax/sql/maven-metadata-repository.xml @@ -0,0 +1,25 @@ + + + + + javax.sql + jdbc + 2.0 + diff --git a/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.pom new file mode 100644 index 000000000..202a0a448 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.pom @@ -0,0 +1,28 @@ + + + + 4.0.0 + org.apache.maven + A + 1.0 + Maven Test Repository Artifact Discovery + war + diff --git a/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.war b/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.war new file mode 100644 index 000000000..54d190b23 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/A/1.0/A-1.0.war @@ -0,0 +1 @@ +dummy content. sample file only. 
diff --git a/archiva-common/src/test/repository/org/apache/maven/B/1.0/B-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/B/1.0/B-1.0.pom new file mode 100644 index 000000000..fa5f8f6c8 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/B/1.0/B-1.0.pom @@ -0,0 +1,28 @@ + + + + 4.0.0 + org.apache.maven + B + 1.0 + Maven Test Repository Artifact Discovery + pom + diff --git a/archiva-common/src/test/repository/org/apache/maven/B/2.0/B-2.0.pom b/archiva-common/src/test/repository/org/apache/maven/B/2.0/B-2.0.pom new file mode 100644 index 000000000..c3034e820 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/B/2.0/B-2.0.pom @@ -0,0 +1,28 @@ + + + + 4.0.0 + org.apache.maven + B + 2.0 + Maven Test Repository Artifact Discovery + pom + diff --git a/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.pom new file mode 100644 index 000000000..ae14cd7eb --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.pom @@ -0,0 +1,28 @@ + + + + 4.0.0 + org.apache.maven + C + 1.0 + Maven Test Repository Artifact Discovery + war + diff --git a/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.war b/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.war new file mode 100644 index 000000000..54d190b23 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/C/1.0/C-1.0.war @@ -0,0 +1 @@ +dummy content. sample file only. diff --git a/archiva-common/src/test/repository/org/apache/maven/discovery/1.0/discovery-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/discovery/1.0/discovery-1.0.pom new file mode 100644 index 000000000..5a29f6117 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/discovery/1.0/discovery-1.0.pom @@ -0,0 +1,28 @@ + + + + 4.0.0 + org.apache.maven + discovery + 1.0 + Maven Test Repository Artifact Discovery + pom + diff --git a/archiva-common/src/test/repository/org/apache/maven/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/maven/maven-metadata.xml new file mode 100644 index 000000000..8ce7fc7bb --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/maven-metadata.xml @@ -0,0 +1,23 @@ + + + + + org.apache.maven + \ No newline at end of file diff --git a/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.jar b/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.jar new file mode 100644 index 000000000..54d190b23 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.jar @@ -0,0 +1 @@ +dummy content. sample file only. diff --git a/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.pom new file mode 100644 index 000000000..6ab57d162 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/samplejar/1.0/samplejar-1.0.pom @@ -0,0 +1,29 @@ + + + + 4.0.0 + org.apache.maven + C + 1.0 + Maven Test Repository Artifact Discovery + + + diff --git a/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.jar b/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.jar new file mode 100644 index 000000000..54d190b23 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.jar @@ -0,0 +1 @@ +dummy content. 
sample file only. diff --git a/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.pom b/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.pom new file mode 100644 index 000000000..a959980df --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/samplejar/2.0/samplejar-2.0.pom @@ -0,0 +1,29 @@ + + + + 4.0.0 + org.apache.maven + C + 1.0 + Maven Test Repository Artifact Discovery + + jar + diff --git a/archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/maven-metadata.xml new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar b/archiva-common/src/test/repository/org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar b/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar b/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar b/archiva-common/src/test/repository/org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-sources.jar b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-sources.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-test-sources.jar b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0-test-sources.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.jar b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.jar new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.tar.gz b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.tar.gz new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.zip b/archiva-common/src/test/repository/org/apache/maven/testing/1.0/testing-1.0.zip new file mode 100644 index 000000000..e69de29bb diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar new file mode 100644 index 000000000..54d190b23 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.jar @@ -0,0 +1 @@ +dummy content. sample file only. 
diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom new file mode 100644 index 000000000..452727f28 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/1.0/test-not-updated-1.0.pom @@ -0,0 +1,29 @@ + + + + 4.0.0 + org.apache.maven.update + test-not-updated + 1.0 + Maven Test Repository Artifact Discovery + + + diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/maven-metadata.xml new file mode 100644 index 000000000..bd56a21c1 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/update/test-not-updated/maven-metadata.xml @@ -0,0 +1,24 @@ + + + + + org.apache.maven.update + test-not-updated + \ No newline at end of file diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar new file mode 100644 index 000000000..54d190b23 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.jar @@ -0,0 +1 @@ +dummy content. sample file only. diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom new file mode 100644 index 000000000..edd7b6479 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/1.0/test-updated-1.0.pom @@ -0,0 +1,29 @@ + + + + 4.0.0 + org.apache.maven.update + test-updated + 1.0 + Maven Test Repository Artifact Discovery + + + diff --git a/archiva-common/src/test/repository/org/apache/maven/update/test-updated/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/maven-metadata.xml new file mode 100644 index 000000000..86e063ca8 --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/maven/update/test-updated/maven-metadata.xml @@ -0,0 +1,24 @@ + + + + + org.apache.maven.update + test-updated + \ No newline at end of file diff --git a/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/discovery-1.0.pom b/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/discovery-1.0.pom new file mode 100644 index 000000000..12538e81a --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/discovery-1.0.pom @@ -0,0 +1,28 @@ + + + + 4.0.0 + org.apache.testgroup + discovery + 1.0 + Maven Test Repository Artifact Discovery + pom + diff --git a/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/maven-metadata.xml new file mode 100644 index 000000000..8ee18048c --- /dev/null +++ b/archiva-common/src/test/repository/org/apache/testgroup/discovery/1.0/maven-metadata.xml @@ -0,0 +1,25 @@ + + + + + org.apache.testgroup + discovery + 1.0 + \ No newline at end of file diff --git a/archiva-common/src/test/repository/org/apache/testgroup/discovery/maven-metadata.xml b/archiva-common/src/test/repository/org/apache/testgroup/discovery/maven-metadata.xml new file mode 100644 index 000000000..b024ef7ef --- /dev/null +++ 
b/archiva-common/src/test/repository/org/apache/testgroup/discovery/maven-metadata.xml @@ -0,0 +1,24 @@ + + + + + org.apache.testgroup + discovery + \ No newline at end of file diff --git a/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.xml b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.xml new file mode 100644 index 000000000..5760e8918 --- /dev/null +++ b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericArtifactConsumerTest.xml @@ -0,0 +1,34 @@ + + + + + + + org.apache.maven.archiva.common.consumers.Consumer + mock-artifact + org.apache.maven.archiva.common.consumers.MockArtifactConsumer + + + org.apache.maven.artifact.factory.ArtifactFactory + + + + + \ No newline at end of file diff --git a/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericModelConsumerTest.xml b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericModelConsumerTest.xml new file mode 100644 index 000000000..2ded1b62d --- /dev/null +++ b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericModelConsumerTest.xml @@ -0,0 +1,34 @@ + + + + + + + org.apache.maven.archiva.discoverer.DiscovererConsumer + mock-model + org.apache.maven.archiva.discoverer.consumers.MockModelConsumer + + + org.apache.maven.artifact.factory.ArtifactFactory + + + + + \ No newline at end of file diff --git a/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumerTest.xml b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumerTest.xml new file mode 100644 index 000000000..da9864d00 --- /dev/null +++ b/archiva-common/src/test/resources/org/apache/maven/archiva/common/consumers/GenericRepositoryMetadataConsumerTest.xml @@ -0,0 +1,34 @@ + + + + + + + org.apache.maven.archiva.discoverer.DiscovererConsumer + mock-metadata + org.apache.maven.archiva.discoverer.consumers.MockRepositoryMetadataConsumer + + + org.apache.maven.artifact.factory.ArtifactFactory + + + + + \ No newline at end of file diff --git a/archiva-configuration/src/main/mdo/configuration.mdo b/archiva-configuration/src/main/mdo/configuration.mdo index d414a9273..2893e289c 100644 --- a/archiva-configuration/src/main/mdo/configuration.mdo +++ b/archiva-configuration/src/main/mdo/configuration.mdo @@ -85,12 +85,13 @@ - indexerCronExpression + dataRefreshCronExpression 1.0.0 String - When to run the indexing mechanism. Default is every 0 and 30 mins of any hour. + When to run the data refresh task. Default is every 30 mins (translated as every 0 and 30 minute reading of every hour) 0 0,30 * * * ? 
- + + proxy 1.0.0 diff --git a/archiva-configuration/src/test/java/org/apache/maven/archiva/configuration/ArchivaConfigurationTest.java b/archiva-configuration/src/test/java/org/apache/maven/archiva/configuration/ArchivaConfigurationTest.java index 94a2e3166..5164c561d 100644 --- a/archiva-configuration/src/test/java/org/apache/maven/archiva/configuration/ArchivaConfigurationTest.java +++ b/archiva-configuration/src/test/java/org/apache/maven/archiva/configuration/ArchivaConfigurationTest.java @@ -43,7 +43,7 @@ public class ArchivaConfigurationTest // check default configuration assertNotNull( "check configuration returned", configuration ); assertEquals( "check configuration has default elements", "0 0,30 * * * ?", - configuration.getIndexerCronExpression() ); + configuration.getDataRefreshCronExpression() ); assertNull( "check configuration has default elements", configuration.getIndexPath() ); assertTrue( "check configuration has default elements", configuration.getRepositories().isEmpty() ); } diff --git a/archiva-converter/pom.xml b/archiva-converter/pom.xml index d123cdcf9..49bde7bd2 100644 --- a/archiva-converter/pom.xml +++ b/archiva-converter/pom.xml @@ -29,6 +29,10 @@ archiva-converter Archiva Repository Converter + + org.apache.maven.archiva + archiva-discoverer + org.codehaus.plexus plexus-utils @@ -46,8 +50,8 @@ maven-model-converter - org.apache.maven.archiva - archiva-reports-standard + org.codehaus.plexus + plexus-digest org.codehaus.plexus @@ -58,6 +62,13 @@ commons-io commons-io + + + hsqldb + hsqldb + 1.7.3.3 + test + org.codehaus.plexus diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionEvent.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionEvent.java new file mode 100644 index 000000000..bd5ef5854 --- /dev/null +++ b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionEvent.java @@ -0,0 +1,128 @@ +package org.apache.maven.archiva.converter; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter; +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.repository.ArtifactRepository; + +/** + * ConversionEvent + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ConversionEvent +{ + public static final int STARTED = 0; + + public static final int PROCESSED = 1; + + public static final int WARNING = 2; + + public static final int ERROR = 3; + + public static final int FINISHED = 4; + + private int type; + + private String message; + + private Artifact artifact; + + private ArtifactRepository repository; + + private Exception exception; + + public ConversionEvent( ArtifactRepository repository, int type ) + { + this.repository = repository; + this.type = type; + } + + public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact ) + { + this( repository, type ); + this.artifact = artifact; + } + + public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact, String message ) + { + this( repository, type ); + this.artifact = artifact; + this.message = message; + } + + public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact, Exception exception ) + { + this( repository, type ); + this.artifact = artifact; + this.exception = exception; + } + + public Artifact getArtifact() + { + return artifact; + } + + public Exception getException() + { + return exception; + } + + public String getMessage() + { + return message; + } + + public ArtifactRepository getRepository() + { + return repository; + } + + /** + *
+ * The type of event.
+ *
+ * Can be one of the following ...
+ *
+ * <ul>
+ * <li>{@link #STARTED} - the whole repository conversion process has started.
+ * only seen when using the whole repository conversion technique with the
+ * {@link LegacyRepositoryConverter#convertLegacyRepository(java.io.File, java.io.File, java.util.List, boolean)}
+ * method.</li>
+ * <li>{@link #PROCESSED} - a specific artifact has been processed.</li>
+ * <li>{@link #WARNING} - a warning has been detected for a specific artifact during the conversion process.</li>
+ * <li>{@link #ERROR} - an error in the processing of an artifact has been detected.</li>
+ * <li>{@link #FINISHED} - the whole repository conversion process has finished.
+ * only seen when using the whole repository conversion technique with the
+ * {@link LegacyRepositoryConverter#convertLegacyRepository(java.io.File, java.io.File, java.util.List, boolean)}
+ * method.</li>
+ * </ul>
+ *
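+ * <p>
+ * A minimal listener sketch (hypothetical class name, using only the API introduced in this
+ * commit) showing how the event type might be inspected:
+ * </p>
+ * <pre>
+ * public class LoggingConversionListener
+ *     implements ConversionListener
+ * {
+ *     public void conversionEvent( ConversionEvent event )
+ *     {
+ *         if ( event.getType() == ConversionEvent.ERROR )
+ *         {
+ *             // message or exception may be null depending on how the event was raised
+ *             System.err.println( "Error on " + event.getArtifact() + ": " + event.getMessage() );
+ *         }
+ *     }
+ * }
+ * </pre>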
+ * @return + */ + public int getType() + { + return type; + } +} diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionListener.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionListener.java new file mode 100644 index 000000000..3193bd475 --- /dev/null +++ b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/ConversionListener.java @@ -0,0 +1,31 @@ +package org.apache.maven.archiva.converter; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * ConversionListener + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public interface ConversionListener +{ + public void conversionEvent( ConversionEvent event ); +} diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java index c723a11ab..5b03d6f48 100644 --- a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java +++ b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java @@ -22,7 +22,6 @@ package org.apache.maven.archiva.converter; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.maven.archiva.converter.transaction.FileTransaction; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.factory.ArtifactFactory; import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager; @@ -38,7 +37,6 @@ import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer; import org.apache.maven.model.DistributionManagement; import org.apache.maven.model.Model; import org.apache.maven.model.Relocation; -import org.apache.maven.model.converter.ArtifactPomRewriter; import org.apache.maven.model.converter.ModelConverter; import org.apache.maven.model.converter.PomTranslationException; import org.apache.maven.model.io.xpp3.MavenXpp3Writer; @@ -46,6 +44,7 @@ import org.apache.maven.model.v3_0_0.io.xpp3.MavenXpp3Reader; import org.codehaus.plexus.digest.Digester; import org.codehaus.plexus.digest.DigesterException; import org.codehaus.plexus.i18n.I18N; +import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import java.io.File; @@ -54,6 +53,7 @@ import java.io.FileReader; import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -67,6 +67,7 @@ import java.util.regex.Matcher; * @plexus.component 
role="org.apache.maven.archiva.converter.RepositoryConverter" role-hint="default" */ public class DefaultRepositoryConverter + extends AbstractLogEnabled implements RepositoryConverter { /** @@ -81,11 +82,6 @@ public class DefaultRepositoryConverter */ private ArtifactFactory artifactFactory; - /** - * @plexus.requirement - */ - private ArtifactPomRewriter rewriter; - /** * @plexus.requirement */ @@ -111,7 +107,9 @@ public class DefaultRepositoryConverter */ private I18N i18n; - public void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter ) + private List listeners = new ArrayList(); + + public void convert( Artifact artifact, ArtifactRepository targetRepository ) throws RepositoryConversionException { if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) ) @@ -119,20 +117,19 @@ public class DefaultRepositoryConverter throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) ); } - if ( validateMetadata( artifact, reporter ) ) + if ( validateMetadata( artifact ) ) { FileTransaction transaction = new FileTransaction(); - if ( copyPom( artifact, targetRepository, reporter, transaction ) ) + if ( copyPom( artifact, targetRepository, transaction ) ) { - if ( copyArtifact( artifact, targetRepository, reporter, transaction ) ) + if ( copyArtifact( artifact, targetRepository, transaction ) ) { Metadata metadata = createBaseMetadata( artifact ); Versioning versioning = new Versioning(); versioning.addVersion( artifact.getBaseVersion() ); metadata.setVersioning( versioning ); - updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata, - transaction ); + updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata, transaction ); metadata = createBaseMetadata( artifact ); metadata.setVersion( artifact.getBaseVersion() ); @@ -173,8 +170,8 @@ public class DefaultRepositoryConverter Metadata newMetadata, FileTransaction transaction ) throws RepositoryConversionException { - File file = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File file = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); Metadata metadata; boolean changed; @@ -244,7 +241,7 @@ public class DefaultRepositoryConverter return metadata; } - private boolean validateMetadata( Artifact artifact, ReportingDatabase reporter ) + private boolean validateMetadata( Artifact artifact ) throws RepositoryConversionException { ArtifactRepository repository = artifact.getRepository(); @@ -252,12 +249,11 @@ public class DefaultRepositoryConverter boolean result = true; RepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact ); - File file = - new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) ); + File file = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) ); if ( file.exists() ) { Metadata metadata = readMetadata( file ); - result = validateMetadata( metadata, repositoryMetadata, artifact, reporter ); + result = validateMetadata( metadata, repositoryMetadata, artifact ); } repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact ); @@ -265,14 +261,13 @@ public class DefaultRepositoryConverter if ( file.exists() ) { Metadata metadata = readMetadata( file ); - result = result && validateMetadata( metadata, repositoryMetadata, artifact, 
reporter ); + result = result && validateMetadata( metadata, repositoryMetadata, artifact ); } return result; } - private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact, - ReportingDatabase reporter ) + private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact ) { String groupIdKey; String artifactIdKey = null; @@ -302,14 +297,14 @@ public class DefaultRepositoryConverter if ( metadata.getGroupId() == null || !metadata.getGroupId().equals( artifact.getGroupId() ) ) { - addFailure( reporter, artifact, groupIdKey ); + addFailure( artifact, groupIdKey ); result = false; } if ( !repositoryMetadata.storedInGroupDirectory() ) { if ( metadata.getGroupId() == null || !metadata.getArtifactId().equals( artifact.getArtifactId() ) ) { - addFailure( reporter, artifact, artifactIdKey ); + addFailure( artifact, artifactIdKey ); result = false; } if ( !repositoryMetadata.storedInArtifactVersionDirectory() ) @@ -319,8 +314,7 @@ public class DefaultRepositoryConverter boolean foundVersion = false; if ( metadata.getVersioning() != null ) { - for ( Iterator i = metadata.getVersioning().getVersions().iterator(); - i.hasNext() && !foundVersion; ) + for ( Iterator i = metadata.getVersioning().getVersions().iterator(); i.hasNext() && !foundVersion; ) { String version = (String) i.next(); if ( version.equals( artifact.getBaseVersion() ) ) @@ -332,7 +326,7 @@ public class DefaultRepositoryConverter if ( !foundVersion ) { - addFailure( reporter, artifact, versionsKey ); + addFailure( artifact, versionsKey ); result = false; } } @@ -341,7 +335,7 @@ public class DefaultRepositoryConverter // snapshot metadata if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) ) { - addFailure( reporter, artifact, versionKey ); + addFailure( artifact, versionKey ); result = false; } @@ -364,7 +358,7 @@ public class DefaultRepositoryConverter if ( !correct ) { - addFailure( reporter, artifact, snapshotKey ); + addFailure( artifact, snapshotKey ); result = false; } } @@ -374,30 +368,30 @@ public class DefaultRepositoryConverter return result; } - private void addFailure( ReportingDatabase reporter, Artifact artifact, String key ) + private void addFailure( Artifact artifact, String key ) { - addFailureWithReason( reporter, artifact, getI18NString( key ) ); - + addFailureWithReason( artifact, getI18NString( key ) ); } - private static void addWarning( ReportingDatabase reporter, Artifact artifact, String message ) + private void addWarning( Artifact artifact, String message ) { // TODO: should we be able to identify/fix these? - reporter.addWarning( artifact, null, null, message ); + // TODO: write archiva-artifact-repair module + triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.WARNING, artifact, + message ) ); } - private static void addFailureWithReason( ReportingDatabase reporter, Artifact artifact, String reason ) + private void addFailureWithReason( Artifact artifact, String reason ) { // TODO: should we be able to identify/fix these? 
- reporter.addFailure( artifact, null, null, reason ); + triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.ERROR, artifact, reason ) ); } - private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter, - FileTransaction transaction ) + private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction ) throws RepositoryConversionException { - Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(), - artifact.getVersion() ); + Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact + .getVersion() ); pom.setBaseVersion( artifact.getBaseVersion() ); ArtifactRepository repository = artifact.getRepository(); File file = new File( repository.getBasedir(), repository.pathOf( pom ) ); @@ -411,7 +405,7 @@ public class DefaultRepositoryConverter boolean checksumsValid = false; try { - if ( testChecksums( artifact, file, reporter ) ) + if ( testChecksums( artifact, file ) ) { checksumsValid = true; } @@ -457,16 +451,15 @@ public class DefaultRepositoryConverter if ( doRelocation( artifact, v3Model, targetRepository, transaction ) ) { - Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(), - artifact.getArtifactId(), - artifact.getVersion() ); + Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact + .getArtifactId(), artifact.getVersion() ); targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( relocatedPom ) ); } Model v4Model = translator.translate( v3Model ); - translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(), - v3Model.getVersion(), v3Model.getPackage() ); + translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(), v3Model + .getVersion(), v3Model.getPackage() ); writer = new StringWriter(); MavenXpp3Writer Xpp3Writer = new MavenXpp3Writer(); @@ -479,13 +472,12 @@ public class DefaultRepositoryConverter for ( Iterator i = warnings.iterator(); i.hasNext(); ) { String message = (String) i.next(); - addWarning( reporter, artifact, message ); + addWarning( artifact, message ); } } catch ( XmlPullParserException e ) { - addFailureWithReason( reporter, artifact, - getI18NString( "failure.invalid.source.pom", e.getMessage() ) ); + addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) ); result = false; } catch ( IOException e ) @@ -494,8 +486,7 @@ public class DefaultRepositoryConverter } catch ( PomTranslationException e ) { - addFailureWithReason( reporter, artifact, - getI18NString( "failure.invalid.source.pom", e.getMessage() ) ); + addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) ); result = false; } finally @@ -506,7 +497,7 @@ public class DefaultRepositoryConverter } else { - addWarning( reporter, artifact, getI18NString( "warning.missing.pom" ) ); + addWarning( artifact, getI18NString( "warning.missing.pom" ) ); } return result; } @@ -516,8 +507,8 @@ public class DefaultRepositoryConverter throws IOException { Properties properties = v3Model.getProperties(); - if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" ) || - properties.containsKey( "relocated.version" ) ) + if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" ) + || 
properties.containsKey( "relocated.version" ) ) { String newGroupId = properties.getProperty( "relocated.groupId", v3Model.getGroupId() ); properties.remove( "relocated.groupId" ); @@ -600,7 +591,7 @@ public class DefaultRepositoryConverter return i18n.getString( getClass().getName(), Locale.getDefault(), key ); } - private boolean testChecksums( Artifact artifact, File file, ReportingDatabase reporter ) + private boolean testChecksums( Artifact artifact, File file ) throws IOException { boolean result = true; @@ -609,7 +600,7 @@ public class DefaultRepositoryConverter { Digester digester = (Digester) it.next(); result &= verifyChecksum( file, file.getName() + "." + getDigesterFileExtension( digester ), digester, - reporter, artifact, "failure.incorrect." + getDigesterFileExtension( digester ) ); + artifact, "failure.incorrect." + getDigesterFileExtension( digester ) ); } return result; } @@ -623,8 +614,7 @@ public class DefaultRepositoryConverter return digester.getAlgorithm().toLowerCase().replaceAll( "-", "" ); } - private boolean verifyChecksum( File file, String fileName, Digester digester, ReportingDatabase reporter, - Artifact artifact, String key ) + private boolean verifyChecksum( File file, String fileName, Digester digester, Artifact artifact, String key ) throws IOException { boolean result = true; @@ -639,15 +629,14 @@ public class DefaultRepositoryConverter } catch ( DigesterException e ) { - addFailure( reporter, artifact, key ); + addFailure( artifact, key ); result = false; } } return result; } - private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter, - FileTransaction transaction ) + private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction ) throws RepositoryConversionException { File sourceFile = artifact.getFile(); @@ -668,7 +657,7 @@ public class DefaultRepositoryConverter matching = FileUtils.contentEquals( sourceFile, targetFile ); if ( !matching ) { - addFailure( reporter, artifact, "failure.target.already.exists" ); + addFailure( artifact, "failure.target.already.exists" ); result = false; } } @@ -676,7 +665,7 @@ public class DefaultRepositoryConverter { if ( force || !matching ) { - if ( testChecksums( artifact, sourceFile, reporter ) ) + if ( testChecksums( artifact, sourceFile ) ) { transaction.copyFile( sourceFile, targetFile, digesters ); } @@ -694,7 +683,7 @@ public class DefaultRepositoryConverter return result; } - public void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter ) + public void convert( List artifacts, ArtifactRepository targetRepository ) throws RepositoryConversionException { for ( Iterator i = artifacts.iterator(); i.hasNext(); ) @@ -703,20 +692,49 @@ public class DefaultRepositoryConverter try { - convert( artifact, targetRepository, reporter ); + convert( artifact, targetRepository ); } catch ( RepositoryConversionException e ) { - // Need to add: - // artifact - // processor - // problem - // reason - //TODO: this doesn't really provide any real facility for a decent error message, having - // the stack trace would be useful. I also have no idea what a processor is currently or - // how to get hold of it here. - - reporter.addFailure( artifact, "", e.getLocalizedMessage(), e.getCause().getLocalizedMessage() ); + triggerConversionEvent( new ConversionEvent( targetRepository, ConversionEvent.ERROR, artifact, e ) ); + } + } + } + + /** + * Add a listener to the conversion process. 
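+ * <p>
+ * Usage sketch (hypothetical variable names, using only types from this commit): register a
+ * listener before calling {@link #convert(Artifact, ArtifactRepository)} so warnings and errors
+ * are reported as {@link ConversionEvent}s rather than written to a reporting database.
+ * </p>
+ * <pre>
+ * repositoryConverter.addConversionListener( new ConversionListener()
+ * {
+ *     public void conversionEvent( ConversionEvent event )
+ *     {
+ *         System.out.println( event.getType() + " : " + event.getArtifact() );
+ *     }
+ * } );
+ * repositoryConverter.convert( artifact, targetRepository );
+ * </pre>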
+ * + * @param listener the listener to add. + */ + public void addConversionListener( ConversionListener listener ) + { + listeners.add( listener ); + } + + /** + * Remove a listener from the conversion process. + * + * @param listener the listener to remove. + */ + public void removeConversionListener( ConversionListener listener ) + { + listeners.remove( listener ); + } + + private void triggerConversionEvent( ConversionEvent event ) + { + Iterator it = listeners.iterator(); + while ( it.hasNext() ) + { + ConversionListener listener = (ConversionListener) it.next(); + + try + { + listener.conversionEvent( event ); + } + catch ( Throwable t ) + { + getLogger().warn( "ConversionEvent resulted in exception from listener: " + t.getMessage(), t ); } } } diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java index d47d5b1f9..37c21fcce 100644 --- a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java +++ b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java @@ -19,7 +19,6 @@ package org.apache.maven.archiva.converter; * under the License. */ -import org.apache.maven.archiva.reporting.database.ReportingDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; @@ -39,9 +38,8 @@ public interface RepositoryConverter * * @param artifact the artifact to convert * @param targetRepository the target repository - * @param reporter reporter to track the results of the conversion */ - void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter ) + void convert( Artifact artifact, ArtifactRepository targetRepository ) throws RepositoryConversionException; /** @@ -49,8 +47,21 @@ public interface RepositoryConverter * * @param artifacts the set of artifacts to convert * @param targetRepository the target repository - * @param reporter reporter to track the results of the conversions */ - void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter ) + void convert( List artifacts, ArtifactRepository targetRepository ) throws RepositoryConversionException; + + /** + * Add a listener to the conversion process. + * + * @param listener the listener to add. + */ + void addConversionListener( ConversionListener listener ); + + /** + * Remove a listener from the conversion process. + * + * @param listener the listener to remove. + */ + void removeConversionListener( ConversionListener listener ); } diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java new file mode 100644 index 000000000..469cc33e3 --- /dev/null +++ b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java @@ -0,0 +1,124 @@ +package org.apache.maven.archiva.converter.legacy; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.converter.ConversionListener; +import org.apache.maven.archiva.converter.RepositoryConversionException; +import org.apache.maven.archiva.discoverer.Discoverer; +import org.apache.maven.archiva.discoverer.DiscovererException; +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.apache.maven.artifact.repository.ArtifactRepositoryFactory; +import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; + +import java.io.File; +import java.net.MalformedURLException; +import java.util.ArrayList; +import java.util.List; + +/** + * @author Jason van Zyl + * @plexus.component + * @todo turn this into a general conversion component and hide all this crap here. + * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies + */ +public class DefaultLegacyRepositoryConverter + implements LegacyRepositoryConverter +{ + /** + * @plexus.requirement role-hint="legacy" + */ + private ArtifactRepositoryLayout legacyLayout; + + /** + * @plexus.requirement role-hint="default" + */ + private ArtifactRepositoryLayout defaultLayout; + + /** + * @plexus.requirement + */ + private ArtifactRepositoryFactory artifactRepositoryFactory; + + /** + * @plexus.requirement role-hint="default" + */ + private Discoverer discoverer; + + /** + * @plexus.requirement role="org.apache.maven.archiva.common.consumers.Consumer" role-hint="legacy-converter" + */ + private LegacyConverterArtifactConsumer legacyConverterConsumer; + + public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, + List fileExclusionPatterns, boolean includeSnapshots ) + throws RepositoryConversionException + { + ArtifactRepository legacyRepository; + + ArtifactRepository repository; + + try + { + legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy", legacyRepositoryDirectory + .toURI().toURL().toString(), legacyLayout, null, null ); + + repository = artifactRepositoryFactory.createArtifactRepository( "default", repositoryDirectory.toURI() + .toURL().toString(), defaultLayout, null, null ); + } + catch ( MalformedURLException e ) + { + throw new RepositoryConversionException( "Error convering legacy repository.", e ); + } + + try + { + List consumers = new ArrayList(); + legacyConverterConsumer.setDestinationRepository( repository ); + consumers.add( legacyConverterConsumer ); + + discoverer.walkRepository( legacyRepository, consumers, includeSnapshots ); + } + catch ( DiscovererException e ) + { + throw new RepositoryConversionException( "Unable to convert repository due to discoverer error:" + + e.getMessage(), e ); + } + } + + /** + * Add a listener to the conversion process. + * + * @param listener the listener to add. + */ + public void addConversionListener( ConversionListener listener ) + { + legacyConverterConsumer.addConversionListener( listener ); + } + + /** + * Remove a listener from the conversion process. + * + * @param listener the listener to remove. 
+ */ + public void removeConversionListener( ConversionListener listener ) + { + legacyConverterConsumer.removeConversionListener( listener ); + } +} diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java new file mode 100644 index 000000000..a5b758c4b --- /dev/null +++ b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyConverterArtifactConsumer.java @@ -0,0 +1,105 @@ +package org.apache.maven.archiva.converter.legacy; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.archiva.converter.ConversionListener; +import org.apache.maven.archiva.converter.RepositoryConversionException; +import org.apache.maven.archiva.converter.RepositoryConverter; +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.repository.ArtifactRepository; + +/** + * LegacyConverterArtifactConsumer - convert artifacts as they are found + * into the destination repository. + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers" + * role-hint="legacy-converter" + * instantiation-strategy="per-lookup" + */ +public class LegacyConverterArtifactConsumer + extends GenericArtifactConsumer +{ + /** + * @plexus.requirement + */ + private RepositoryConverter repositoryConverter; + + private ArtifactRepository destinationRepository; + + public void processArtifact( Artifact artifact, BaseFile file ) + { + try + { + repositoryConverter.convert( artifact, destinationRepository ); + } + catch ( RepositoryConversionException e ) + { + getLogger().error( + "Unable to convert artifact " + artifact + " to destination repository " + + destinationRepository, e ); + } + } + + public void processFileProblem( BaseFile path, String message ) + { + getLogger().error( "Artifact Build Failure on " + path + " : " + message ); + // TODO: report this to the ConversionListener? + } + + public ArtifactRepository getDestinationRepository() + { + return destinationRepository; + } + + public void setDestinationRepository( ArtifactRepository destinationRepository ) + { + this.destinationRepository = destinationRepository; + } + + public String getName() + { + return "Legacy Artifact Converter Consumer"; + } + + /** + * Add a listener to the conversion process. + * + * @param listener the listener to add. 
+ */ + public void addConversionListener( ConversionListener listener ) + { + repositoryConverter.addConversionListener( listener ); + } + + /** + * Remove a listener from the conversion process. + * + * @param listener the listener to remove. + */ + public void removeConversionListener( ConversionListener listener ) + { + repositoryConverter.removeConversionListener( listener ); + } +} diff --git a/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java new file mode 100644 index 000000000..60d3d5599 --- /dev/null +++ b/archiva-converter/src/main/java/org/apache/maven/archiva/converter/legacy/LegacyRepositoryConverter.java @@ -0,0 +1,64 @@ +package org.apache.maven.archiva.converter.legacy; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.converter.ConversionListener; +import org.apache.maven.archiva.converter.RepositoryConversionException; + +import java.io.File; +import java.util.List; + +/** + * Convert an entire repository. + * + * @author Jason van Zyl + */ +public interface LegacyRepositoryConverter +{ + String ROLE = LegacyRepositoryConverter.class.getName(); + + /** + * Convert a legacy repository to a modern repository. This means a Maven 1.x repository + * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs. + * + * @param legacyRepositoryDirectory the directory of the legacy repository. + * @param destinationRepositoryDirectory the directory of the modern repository. + * @param fileExclusionPatterns the list of patterns to exclude from the conversion. + * @param includeSnapshots true to include snapshots in conversion or not. + * @throws RepositoryConversionException + */ + void convertLegacyRepository( File legacyRepositoryDirectory, File destinationRepositoryDirectory, + List fileExclusionPatterns, boolean includeSnapshots ) + throws RepositoryConversionException; + + /** + * Add a listener to the conversion process. + * + * @param listener the listener to add. + */ + void addConversionListener( ConversionListener listener ); + + /** + * Remove a listener from the conversion process. + * + * @param listener the listener to remove. 
+ */ + void removeConversionListener( ConversionListener listener ); +} diff --git a/archiva-converter/src/test/java/org/apache/maven/archiva/converter/AllTests.java b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/AllTests.java new file mode 100644 index 000000000..dbd54f604 --- /dev/null +++ b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/AllTests.java @@ -0,0 +1,44 @@ +package org.apache.maven.archiva.converter; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.converter" ); + //$JUnit-BEGIN$ + suite.addTest( org.apache.maven.archiva.converter.transaction.AllTests.suite() ); + suite.addTestSuite( RepositoryConverterTest.class ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-converter/src/test/java/org/apache/maven/archiva/converter/MockConversionListener.java b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/MockConversionListener.java new file mode 100644 index 000000000..92b70069b --- /dev/null +++ b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/MockConversionListener.java @@ -0,0 +1,163 @@ +package org.apache.maven.archiva.converter; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.commons.lang.StringUtils; +import org.apache.maven.artifact.Artifact; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * MockConversionListener + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class MockConversionListener + implements ConversionListener +{ + private Map warnings = new HashMap(); + + private Map errors = new HashMap(); + + private Map exceptions = new HashMap(); + + private List processed = new ArrayList(); + + private List repositories = new ArrayList(); + + public void conversionEvent( ConversionEvent event ) + { + switch ( event.getType() ) + { + case ConversionEvent.STARTED: + addUnique( repositories, event.getRepository() ); + break; + case ConversionEvent.PROCESSED: + addUnique( processed, event.getArtifact() ); + break; + case ConversionEvent.WARNING: + if ( event.getException() != null ) + { + addObjectList( exceptions, toKey( event.getArtifact() ), event.getException() ); + } + + if ( event.getMessage() != null ) + { + addObjectList( warnings, toKey( event.getArtifact() ), event.getMessage() ); + } + break; + case ConversionEvent.ERROR: + if ( event.getException() != null ) + { + addObjectList( exceptions, toKey( event.getArtifact() ), event.getException() ); + } + + if ( event.getMessage() != null ) + { + addObjectList( errors, toKey( event.getArtifact() ), event.getMessage() ); + } + break; + case ConversionEvent.FINISHED: + addUnique( repositories, event.getRepository() ); + break; + } + } + + public String toKey( Artifact artifact ) + { + return StringUtils.defaultString( artifact.getGroupId() ) + ":" + + StringUtils.defaultString( artifact.getArtifactId() ) + ":" + + StringUtils.defaultString( artifact.getVersion() ) + ":" + StringUtils.defaultString( artifact.getType() ) + + ":" + StringUtils.defaultString( artifact.getClassifier() ); + } + + private void addObjectList( Map map, String key, Object value ) + { + List objlist = (List) map.get( key ); + if ( objlist == null ) + { + objlist = new ArrayList(); + } + + objlist.add( value ); + + map.put( key, objlist ); + } + + private void addUnique( Collection collection, Object obj ) + { + if ( !collection.contains( obj ) ) + { + collection.add( obj ); + } + } + + public Map getErrors() + { + return errors; + } + + public Map getExceptions() + { + return exceptions; + } + + public List getProcessed() + { + return processed; + } + + public List getRepositories() + { + return repositories; + } + + public Map getWarnings() + { + return warnings; + } + + private int getObjectListCount( Map map ) + { + int count = 0; + for ( Iterator it = map.values().iterator(); it.hasNext(); ) + { + List objList = (List) it.next(); + count += objList.size(); + } + return count; + } + + public int getWarningMessageCount() + { + return getObjectListCount( warnings ); + } + + public int getErrorMessageCount() + { + return getObjectListCount( errors ); + } +} diff --git a/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java index 15d5f187c..8e0c676e3 100644 --- a/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java +++ b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java @@ -20,10 +20,6 @@ package org.apache.maven.archiva.converter; */ import 
org.apache.commons.io.FileUtils; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.archiva.reporting.model.ArtifactResults; -import org.apache.maven.archiva.reporting.model.Result; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.factory.ArtifactFactory; import org.apache.maven.artifact.metadata.ArtifactMetadata; @@ -42,6 +38,8 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.Map.Entry; import java.util.regex.Matcher; /** @@ -64,8 +62,6 @@ public class RepositoryConverterTest private ArtifactFactory artifactFactory; - private ReportingDatabase reportingDatabase; - private static final int SLEEP_MILLIS = 100; private I18N i18n; @@ -80,25 +76,28 @@ public class RepositoryConverterTest ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" ); File sourceBase = getTestFile( "src/test/source-repository" ); - sourceRepository = - factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null ); + sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, + null ); layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" ); File targetBase = getTestFile( "target/test-target-repository" ); copyDirectoryStructure( getTestFile( "src/test/target-repository" ), targetBase ); - targetRepository = - factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null, null ); + targetRepository = factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null, + null ); repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "default" ); artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE ); i18n = (I18N) lookup( I18N.ROLE ); + } - ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" ); - reportingDatabase = new ReportingDatabase( reportGroup ); + protected void tearDown() + throws Exception + { + super.tearDown(); } private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory ) @@ -135,8 +134,8 @@ public class RepositoryConverterTest { if ( !destination.exists() && !destination.mkdirs() ) { - throw new IOException( - "Could not create destination directory '" + destination.getAbsolutePath() + "'." ); + throw new IOException( "Could not create destination directory '" + + destination.getAbsolutePath() + "'." 
); } copyDirectoryStructure( file, destination ); } @@ -155,20 +154,23 @@ public class RepositoryConverterTest Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0" ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); artifactMetadataFile.delete(); ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact ); - File versionMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) ); + File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( versionMetadata ) ); versionMetadataFile.delete(); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); artifactFile.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkSuccess(listener); assertTrue( "Check artifact created", artifactFile.exists() ); assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) ); @@ -200,17 +202,20 @@ public class RepositoryConverterTest Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0" ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); artifactMetadataFile.delete(); ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact ); - File versionMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) ); + File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( versionMetadata ) ); versionMetadataFile.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkSuccess(listener); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); assertTrue( "Check artifact created", artifactFile.exists() ); @@ -241,22 +246,21 @@ public class RepositoryConverterTest { Artifact artifact = createArtifact( "test", "relocated-v3artifact", "1.0.0" ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); artifactMetadataFile.delete(); ArtifactMetadata versionMetadata 
= new SnapshotArtifactRepositoryMetadata( artifact ); - File versionMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) ); + File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( versionMetadata ) ); versionMetadataFile.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); + repositoryConverter.convert( artifact, targetRepository ); //checkSuccess(); --> commented until MNG-2100 is fixed File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); assertTrue( "Check if relocated artifact created", artifactFile.exists() ); - assertTrue( "Check if relocated artifact matches", - FileUtils.contentEquals( artifactFile, artifact.getFile() ) ); + assertTrue( "Check if relocated artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) ); Artifact pomArtifact = createArtifact( "relocated-test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" ); File pomFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( pomArtifact ) ); File testFile = getTestFile( "target/test-target-repository/" + targetRepository.pathOf( pomArtifact ) ); @@ -276,19 +280,20 @@ public class RepositoryConverterTest Artifact artifact = createArtifact( "test", "v3-warnings-artifact", "1.0.0" ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); artifactMetadataFile.delete(); ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact ); - File versionMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) ); + File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( versionMetadata ) ); versionMetadataFile.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() ); - assertEquals( "check number of warnings", 2, reportingDatabase.getNumWarnings() ); - assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() ); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 0, 2 ); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); assertTrue( "Check artifact created", artifactFile.exists() ); @@ -311,17 +316,20 @@ public class RepositoryConverterTest Artifact artifact = createArtifact( "test", "v4artifact", version ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); artifactMetadataFile.delete(); ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact ); - File 
snapshotMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) ); + File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( snapshotMetadata ) ); snapshotMetadataFile.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 0, 0 ); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); assertTrue( "Check artifact created", artifactFile.exists() ); @@ -354,17 +362,20 @@ public class RepositoryConverterTest Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-SNAPSHOT" ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); artifactMetadataFile.delete(); ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact ); - File snapshotMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) ); + File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( snapshotMetadata ) ); snapshotMetadataFile.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 0, 0 ); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); assertTrue( "Check artifact created", artifactFile.exists() ); @@ -410,11 +421,10 @@ public class RepositoryConverterTest public void testMavenOnePluginConversion() throws Exception { - Artifact artifact = - createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0", "maven-plugin" ); - artifact.setFile( - new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) ); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); + Artifact artifact = createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0", + "maven-plugin" ); + artifact.setFile( new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) ); + repositoryConverter.convert( artifact, targetRepository ); // There is a warning but I can't figure out how to look at it. Eyeballing the results it appears // the plugin is being coverted correctly. 
//checkSuccess(); @@ -424,14 +434,14 @@ public class RepositoryConverterTest assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) ); /* - The POM isn't needed for Maven 1.x plugins but the raw conversion for - - artifact = createPomArtifact( artifact ); - File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); - File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" ); - assertTrue( "Check POM created", pomFile.exists() ); - compareFiles( expectedPomFile, pomFile ); - */ + The POM isn't needed for Maven 1.x plugins but the raw conversion for + + artifact = createPomArtifact( artifact ); + File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); + File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" ); + assertTrue( "Check POM created", pomFile.exists() ); + compareFiles( expectedPomFile, pomFile ); + */ } public void testV3TimestampedSnapshotPomConvert() @@ -441,17 +451,20 @@ public class RepositoryConverterTest Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-20060105.130101-3" ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); artifactMetadataFile.delete(); ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact ); - File snapshotMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) ); + File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( snapshotMetadata ) ); snapshotMetadataFile.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 0, 0 ); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); assertTrue( "Check artifact created", artifactFile.exists() ); @@ -483,11 +496,13 @@ public class RepositoryConverterTest // test that a POM is not created when there was none at the source Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" ); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() ); - assertEquals( "check warnings", 1, reportingDatabase.getNumWarnings() ); - assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() ); - assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() ); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 0, 1 ); + + assertHasWarningReason( listener, getI18nString( "warning.missing.pom" ) ); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); assertTrue( "Check artifact created", 
artifactFile.exists() ); @@ -510,15 +525,19 @@ public class RepositoryConverterTest File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); file.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkFailure(); - assertEquals( "check failure message", getI18nString( "failure.incorrect.md5" ), getFailure().getReason() ); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 1, 0 ); + + assertHasErrorReason( listener, getI18nString( "failure.incorrect.md5" ) ); assertFalse( "Check artifact not created", file.exists() ); ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact ); - File metadataFile = - new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) ); + File metadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( metadata ) ); assertFalse( "Check metadata not created", metadataFile.exists() ); } @@ -531,15 +550,19 @@ public class RepositoryConverterTest File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); file.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkFailure(); - assertEquals( "check failure message", getI18nString( "failure.incorrect.sha1" ), getFailure().getReason() ); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 1, 0 ); + + assertHasErrorReason( listener, getI18nString( "failure.incorrect.sha1" ) ); assertFalse( "Check artifact not created", file.exists() ); ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact ); - File metadataFile = - new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) ); + File metadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( metadata ) ); assertFalse( "Check metadata not created", metadataFile.exists() ); } @@ -568,8 +591,11 @@ public class RepositoryConverterTest // Need to guarantee last modified is not equal Thread.sleep( SLEEP_MILLIS ); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 0, 0 ); compareFiles( sourceFile, targetFile ); compareFiles( sourcePomFile, targetPomFile ); @@ -604,17 +630,20 @@ public class RepositoryConverterTest // Need to guarantee last modified is not equal Thread.sleep( SLEEP_MILLIS ); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkFailure(); - assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ), - getFailure().getReason() ); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 1, 0 ); + + assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) ); assertEquals( "Check unmodified", 
origTime, targetFile.lastModified() ); assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() ); ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact ); - File metadataFile = - new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) ); + File metadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( metadata ) ); assertFalse( "Check metadata not created", metadataFile.exists() ); } @@ -641,8 +670,11 @@ public class RepositoryConverterTest sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() ); sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() ); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 0, 0 ); compareFiles( sourceFile, targetFile ); compareFiles( sourcePomFile, targetPomFile ); @@ -651,8 +683,8 @@ public class RepositoryConverterTest assertFalse( "Check modified", origTime == targetPomFile.lastModified() ); ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact ); - File metadataFile = - new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) ); + File metadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( metadata ) ); assertTrue( "Check metadata created", metadataFile.exists() ); } @@ -671,8 +703,11 @@ public class RepositoryConverterTest File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) ); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 0, 0 ); assertTrue( "Check source file exists", sourceFile.exists() ); assertTrue( "Check source POM exists", sourcePomFile.exists() ); @@ -681,8 +716,8 @@ public class RepositoryConverterTest assertFalse( "Check target POM doesn't exist", targetPomFile.exists() ); ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact ); - File metadataFile = - new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) ); + File metadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( metadata ) ); assertFalse( "Check metadata not created", metadataFile.exists() ); } @@ -713,17 +748,20 @@ public class RepositoryConverterTest // Need to guarantee last modified is not equal Thread.sleep( SLEEP_MILLIS ); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkFailure(); - assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ), - getFailure().getReason() ); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkFailure(listener); + + assertHasErrorReason( listener, getI18nString( 
"failure.target.already.exists" ) ); assertEquals( "Check unmodified", origTime, targetFile.lastModified() ); assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() ); ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact ); - File metadataFile = - new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) ); + File metadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( metadata ) ); assertFalse( "Check metadata not created", metadataFile.exists() ); } @@ -734,20 +772,38 @@ public class RepositoryConverterTest Artifact artifact = createArtifact( "test", "rollback-created-artifact", "1.0.0" ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); FileUtils.deleteDirectory( artifactMetadataFile.getParentFile() ); ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact ); - File versionMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) ); + File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( versionMetadata ) ); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkFailure(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkCounts( listener, 1, 0 ); + + List messages = (List) listener.getErrors().get( listener.toKey( artifact )); + assertNotNull("Should have error messages."); + + boolean found = false; String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$"; - assertTrue( "Check failure message", getFailure().getReason().matches( pattern ) ); + for ( Iterator it = messages.iterator(); it.hasNext(); ) + { + String reason = (String) it.next(); + if( reason.matches( pattern ) ) + { + found = true; + break; + } + } + + assertTrue( "Check failure message.", found ); assertFalse( "check artifact rolled back", artifactFile.exists() ); assertFalse( "check metadata rolled back", artifactMetadataFile.exists() ); @@ -763,10 +819,12 @@ public class RepositoryConverterTest artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) ); artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) ); artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) ); - repositoryConverter.convert( artifacts, targetRepository, reportingDatabase ); - assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() ); - assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() ); - assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() ); + + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifacts, targetRepository ); + checkCounts( listener, 0, 0 ); for ( Iterator i = artifacts.iterator(); i.hasNext(); ) { @@ -778,8 
+836,8 @@ public class RepositoryConverterTest artifact = createPomArtifact( artifact ); File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); - File expectedPomFile = - getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId() + ".pom" ); + File expectedPomFile = getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId() + + ".pom" ); assertTrue( "Check POM created", pomFile.exists() ); compareFiles( expectedPomFile, pomFile ); @@ -797,16 +855,19 @@ public class RepositoryConverterTest File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); file.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkFailure(); - assertEquals( "check failure message", getI18nString( "failure.incorrect.artifactMetadata.versions" ), - getFailure().getReason() ); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkFailure(listener); + + assertHasErrorReason( listener, getI18nString( "failure.incorrect.artifactMetadata.versions" ) ); assertFalse( "Check artifact not created", file.exists() ); ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact ); - File metadataFile = - new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) ); + File metadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( metadata ) ); assertFalse( "Check metadata not created", metadataFile.exists() ); } @@ -821,16 +882,19 @@ public class RepositoryConverterTest File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); file.delete(); - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkFailure(); - assertEquals( "check failure message", getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ), - getFailure().getReason() ); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkFailure(listener); + + assertHasErrorReason( listener, getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ) ); assertFalse( "Check artifact not created", file.exists() ); ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact ); - File metadataFile = - new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) ); + File metadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( metadata ) ); assertFalse( "Check metadata not created", metadataFile.exists() ); } @@ -840,9 +904,11 @@ public class RepositoryConverterTest // test artifact level metadata is merged when it already exists on successful conversion Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" ); - - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); - checkSuccess(); + MockConversionListener listener = new MockConversionListener(); + + repositoryConverter.addConversionListener( listener ); + repositoryConverter.convert( artifact, targetRepository ); + checkSuccess(listener); File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) ); assertTrue( "Check artifact created", 
artifactFile.exists() ); @@ -856,8 +922,8 @@ public class RepositoryConverterTest compareFiles( sourcePomFile, pomFile ); ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact ); - File artifactMetadataFile = new File( targetRepository.getBasedir(), - targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) ); + File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository + .pathOfRemoteRepositoryMetadata( artifactMetadata ) ); assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() ); File expectedMetadataFile = getTestFile( "src/test/expected-files/newversion-artifact-metadata.xml" ); @@ -872,14 +938,14 @@ public class RepositoryConverterTest ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE ); - sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(), - targetRepository.getLayout(), null, null ); + sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(), targetRepository + .getLayout(), null, null ); Artifact artifact = createArtifact( "test", "repository-artifact", "1.0" ); try { - repositoryConverter.convert( artifact, targetRepository, reportingDatabase ); + repositoryConverter.convert( artifact, targetRepository ); fail( "Should have failed trying to convert within the same repository" ); } catch ( RepositoryConversionException e ) @@ -905,8 +971,7 @@ public class RepositoryConverterTest return createArtifact( groupId, artifactId, baseVersion, version, "jar" ); } - private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version, - String type ) + private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version, String type ) { Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type ); artifact.setBaseVersion( baseVersion ); @@ -917,8 +982,8 @@ public class RepositoryConverterTest private Artifact createPomArtifact( Artifact artifact ) { - return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(), - artifact.getVersion(), "pom" ); + return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(), artifact + .getVersion(), "pom" ); } private static void compareFiles( File expectedPomFile, File pomFile ) @@ -926,8 +991,7 @@ public class RepositoryConverterTest { String expectedContent = normalizeString( FileUtils.readFileToString( expectedPomFile, null ) ); String targetContent = normalizeString( FileUtils.readFileToString( pomFile, null ) ); - assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent, - targetContent ); + assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent, targetContent ); } private static String normalizeString( String path ) @@ -935,35 +999,79 @@ public class RepositoryConverterTest return path.trim().replaceAll( "\r\n", "\n" ).replace( '\r', '\n' ).replaceAll( "<\\?xml .+\\?>", "" ); } - private void checkSuccess() + private void checkSuccess(MockConversionListener listener) { - assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() ); - assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() ); - assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() ); + checkCounts( listener, 0, 0 ); } - private void checkFailure() + private void 
checkFailure(MockConversionListener listener) { - assertEquals( "check num errors", 1, reportingDatabase.getNumFailures() ); - assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() ); - assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() ); + checkCounts( listener, 1, 0 ); + } + + private void checkCounts( MockConversionListener listener, int failures, int warnings ) + { + int actualFailures = listener.getErrorMessageCount(); + int actualWarnings = listener.getWarningMessageCount(); + + if ( ( failures != actualFailures ) || ( warnings != actualWarnings ) ) + { + fail( "Check Results Counts expected:<" + failures + "," + warnings + "> but was:<" + actualFailures + "," + + actualWarnings + ">" ); + } } private String getI18nString( String key ) { return i18n.getString( repositoryConverter.getClass().getName(), Locale.getDefault(), key ); } - - private Result getFailure() + + private void assertHasWarningReason( MockConversionListener listener, String reason ) { - ArtifactResults artifact = (ArtifactResults) reportingDatabase.getArtifactIterator().next(); - return (Result) artifact.getFailures().get( 0 ); + assertHasMessage( listener.getWarnings(), "warning", reason ); } - private Result getWarning() + private void assertHasErrorReason( MockConversionListener listener, String reason ) { - ArtifactResults artifact = (ArtifactResults) reportingDatabase.getArtifactIterator().next(); - return (Result) artifact.getWarnings().get( 0 ); + assertHasMessage( listener.getErrors(), "error", reason ); + } + + private void assertHasMessage( Map map, String type, String message ) + { + if ( ( map == null ) || ( map.isEmpty() ) ) + { + fail( "No " + type + "s captured, expected " + type + " <" + message + ">" ); + } + + // Attempt to find the message ... + for ( Iterator it = map.values().iterator(); it.hasNext(); ) + { + List msgList = (List) it.next(); + + if ( msgList.contains( message ) ) + { + // Found it! + return; + } + } + + // Didn't find it! whoops ... + for ( Iterator it = map.entrySet().iterator(); it.hasNext(); ) + { + Map.Entry entry = (Entry) it.next(); + String key = (String) entry.getKey(); + List msgList = (List) entry.getValue(); + + System.err.println( " Artifact: " + key ); + + for ( Iterator itMsgs = msgList.iterator(); itMsgs.hasNext(); ) + { + String msg = (String) itMsgs.next(); + System.err.println( " " + msg ); + } + } + + fail( "Unable to find " + type + " reason <" + message + "> in any artifact." 
); } private void createModernSourceRepository() @@ -974,7 +1082,7 @@ public class RepositoryConverterTest ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" ); File sourceBase = getTestFile( "src/test/source-modern-repository" ); - sourceRepository = - factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null ); + sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, + null ); } } diff --git a/archiva-converter/src/test/java/org/apache/maven/archiva/converter/transaction/AllTests.java b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/transaction/AllTests.java new file mode 100644 index 000000000..9274035d8 --- /dev/null +++ b/archiva-converter/src/test/java/org/apache/maven/archiva/converter/transaction/AllTests.java @@ -0,0 +1,44 @@ +package org.apache.maven.archiva.converter.transaction; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.converter.transaction" ); + //$JUnit-BEGIN$ + suite.addTestSuite( CreateFileEventTest.class ); + suite.addTestSuite( CopyFileEventTest.class ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-converter/src/test/resources/log4j.properties b/archiva-converter/src/test/resources/log4j.properties new file mode 100644 index 000000000..9b2c26ade --- /dev/null +++ b/archiva-converter/src/test/resources/log4j.properties @@ -0,0 +1,10 @@ +# Set root logger level to DEBUG and its only appender to A1. +log4j.rootLogger=WARN, A1 + +# A1 is set to be a ConsoleAppender. +log4j.appender.A1=org.apache.log4j.ConsoleAppender + +# A1 uses PatternLayout. +log4j.appender.A1.layout=org.apache.log4j.PatternLayout +log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n + diff --git a/archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml b/archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml index 1007fd835..b1cf9640a 100644 --- a/archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml +++ b/archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml @@ -16,10 +16,11 @@ ~ KIND, either express or implied. See the License for the ~ specific language governing permissions and limitations ~ under the License. 
- --> +--> + org.apache.maven.archiva.converter.RepositoryConverter org.apache.maven.archiva.converter.DefaultRepositoryConverter @@ -36,10 +37,6 @@ org.apache.maven.artifact.factory.ArtifactFactory artifactFactory - - org.apache.maven.model.converter.ArtifactPomRewriter - rewriter - org.codehaus.plexus.i18n.I18N i18n @@ -62,15 +59,67 @@ org.apache.maven.artifact.factory.ArtifactFactory artifactFactory - - org.apache.maven.model.converter.ArtifactPomRewriter - rewriter - org.codehaus.plexus.i18n.I18N i18n + + + + org.codehaus.plexus.jdo.JdoFactory + archiva + org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory + + + + org.hsqldb.jdbcDriver + jdbc:hsqldb:mem:TESTDB + sa + + + + org.jpox.PersistenceManagerFactoryImpl + + + javax.jdo.PersistenceManagerFactoryClass + org.jpox.PersistenceManagerFactoryImpl + + + org.jpox.autoCreateSchema + true + + + org.jpox.validateTables + false + + + org.jpox.validateConstraints + false + + + org.jpox.validateColumns + false + + + org.jpox.autoStartMechanism + None + + + org.jpox.transactionIsolation + READ_UNCOMMITTED + + + org.jpox.poid.transactionIsolation + READ_UNCOMMITTED + + + org.jpox.rdbms.dateTimezone + JDK_DEFAULT_TIMEZONE + + + + diff --git a/archiva-core/pom.xml b/archiva-core/pom.xml index da35be2b9..ed730ec37 100644 --- a/archiva-core/pom.xml +++ b/archiva-core/pom.xml @@ -33,10 +33,6 @@ org.apache.maven.archiva archiva-configuration
- - org.apache.maven.archiva - archiva-converter - org.apache.maven.archiva archiva-discoverer @@ -78,6 +74,13 @@ 1.0-alpha-1 test + + + hsqldb + hsqldb + 1.7.3.3 + test + org.codehaus.plexus @@ -88,16 +91,22 @@ - org.codehaus.mojo - cobertura-maven-plugin - - - - - **/** - - - + org.codehaus.plexus + plexus-maven-plugin + + + merge + + merge-descriptors + + + + ${basedir}/src/main/resources/META-INF/plexus/components.xml + ${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml + + + + diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifact.java b/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifact.java deleted file mode 100644 index 940b6bde2..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifact.java +++ /dev/null @@ -1,76 +0,0 @@ -package org.apache.maven.archiva.artifact; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; - -import java.util.HashMap; -import java.util.Map; - -/** - * ManagedArtifact - * - * @author Joakim Erdfelt - * @version $Id$ - */ -public class ManagedArtifact -{ - private String repositoryId; - - private Artifact artifact; - - private String path; - - protected Map attached; - - public ManagedArtifact( String repoId, Artifact artifact, String path ) - { - super(); - this.repositoryId = repoId; - this.artifact = artifact; - this.path = path; - this.attached = new HashMap(); - } - - public Artifact getArtifact() - { - return artifact; - } - - public String getPath() - { - return path; - } - - public String getRepositoryId() - { - return repositoryId; - } - - public Map getAttached() - { - return attached; - } - - public void setAttached( Map attached ) - { - this.attached = attached; - } -} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifactTypes.java b/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifactTypes.java deleted file mode 100644 index 6cccfcd6a..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedArtifactTypes.java +++ /dev/null @@ -1,81 +0,0 @@ -package org.apache.maven.archiva.artifact; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.commons.lang.StringUtils; - -import java.util.ArrayList; -import java.util.List; - -/** - * ManagedArtifactTypes - provides place to test an unknown artifact type. - * - * @author Joakim Erdfelt - * @version $Id$ - */ -public class ManagedArtifactTypes -{ - public static final int GENERIC = 0; - - public static final int JAVA = 1; - - public static final int EJB = 2; - - private static List javaArtifacts; - - private static List ejbArtifacts; - - static - { - javaArtifacts = new ArrayList(); - javaArtifacts.add( "jar" ); - javaArtifacts.add( "war" ); - javaArtifacts.add( "sar" ); - javaArtifacts.add( "rar" ); - javaArtifacts.add( "ear" ); - - ejbArtifacts = new ArrayList(); - ejbArtifacts.add( "ejb" ); - ejbArtifacts.add( "ejb-client" ); - } - - public static int whichType( String type ) - { - if ( StringUtils.isBlank( type ) ) - { - // TODO: is an empty type even possible? - return GENERIC; - } - - type = type.toLowerCase(); - - if ( ejbArtifacts.contains( type ) ) - { - return EJB; - } - - if ( javaArtifacts.contains( type ) ) - { - return JAVA; - } - - return GENERIC; - } -} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedEjbArtifact.java b/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedEjbArtifact.java deleted file mode 100644 index eacf1cbdf..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedEjbArtifact.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.apache.maven.archiva.artifact; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; - -/** - * ManagedEjbArtifact - adds the ability to reference the ejb-client jar too. 
- * - * @author Joakim Erdfelt - * @version $Id$ - */ -public class ManagedEjbArtifact - extends ManagedJavaArtifact -{ - public static final String CLIENT = "client"; - - public ManagedEjbArtifact( String repoId, Artifact artifact, String path ) - { - super( repoId, artifact, path ); - } - - public String getClientPath() - { - return (String) super.attached.get( CLIENT ); - } - - public void setClientPath( String clientPath ) - { - super.attached.put( CLIENT, clientPath ); - } -} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedJavaArtifact.java b/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedJavaArtifact.java deleted file mode 100644 index babb88420..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/artifact/ManagedJavaArtifact.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.apache.maven.archiva.artifact; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; - -/** - * ManagedJavaArtifact - a ManagedArtifact with optional javadoc and source - * reference jars. - * - * @author Joakim Erdfelt - * @version $Id$ - */ -public class ManagedJavaArtifact - extends ManagedArtifact -{ - public static final String JAVADOC = "javadoc"; - - public static final String SOURCES = "sources"; - - public ManagedJavaArtifact( String repoId, Artifact artifact, String path ) - { - super( repoId, artifact, path ); - } - - public String getJavadocPath() - { - return (String) super.attached.get( JAVADOC ); - } - - public void setJavadocPath( String javadocPath ) - { - super.attached.put( JAVADOC, javadocPath ); - } - - public String getSourcesPath() - { - return (String) super.attached.get( SOURCES ); - } - - public void setSourcesPath( String sourcesPath ) - { - super.attached.put( SOURCES, sourcesPath ); - } -} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java new file mode 100644 index 000000000..7b6d15ffb --- /dev/null +++ b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/ArtifactHealthConsumer.java @@ -0,0 +1,97 @@ +package org.apache.maven.archiva.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; +import org.apache.maven.archiva.reporting.group.ReportGroup; +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.InvalidArtifactRTException; +import org.apache.maven.model.Model; +import org.apache.maven.project.MavenProject; +import org.apache.maven.project.MavenProjectBuilder; +import org.apache.maven.project.ProjectBuildingException; + +import java.util.Collections; + +/** + * ArtifactHealthConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer" + * role-hint="artifact-health" + * instantiation-strategy="per-lookup" + */ +public class ArtifactHealthConsumer + extends GenericArtifactConsumer +{ + /** + * @plexus.requirement + */ + private ArtifactResultsDatabase database; + + /** + * @plexus.requirement role-hint="health" + */ + private ReportGroup health; + + /** + * @plexus.requirement + */ + private MavenProjectBuilder projectBuilder; + + public void processArtifact( Artifact artifact, BaseFile file ) + { + Model model = null; + try + { + Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact + .getArtifactId(), artifact.getVersion() ); + MavenProject project = projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository ); + + model = project.getModel(); + } + catch ( InvalidArtifactRTException e ) + { + database.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e ); + } + catch ( ProjectBuildingException e ) + { + database.addWarning( artifact, null, null, "Error reading project model: " + e ); + } + + database.remove( artifact ); + health.processArtifact( artifact, model ); + } + + public void processFileProblem( BaseFile path, String message ) + { + /* do nothing here (yet) */ + // TODO: store build failure into database? + } + + public String getName() + { + return "Artifact Health Consumer"; + } +} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java new file mode 100644 index 000000000..2d7026bb1 --- /dev/null +++ b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/IndexArtifactConsumer.java @@ -0,0 +1,99 @@ +package org.apache.maven.archiva.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.consumers.GenericArtifactConsumer; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.archiva.configuration.ArchivaConfiguration; +import org.apache.maven.archiva.configuration.Configuration; +import org.apache.maven.archiva.indexer.RepositoryArtifactIndex; +import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory; +import org.apache.maven.archiva.indexer.RepositoryIndexException; +import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory; +import org.apache.maven.artifact.Artifact; +import org.apache.maven.artifact.repository.ArtifactRepository; + +import java.io.File; + +/** + * IndexArtifactConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer" + * role-hint="index-artifact" + * instantiation-strategy="per-lookup" + */ +public class IndexArtifactConsumer + extends GenericArtifactConsumer +{ + /** + * @plexus.requirement + */ + private RepositoryArtifactIndexFactory indexFactory; + + /** + * @plexus.requirement role-hint="standard" + */ + private RepositoryIndexRecordFactory recordFactory; + + /** + * Configuration store. + * + * @plexus.requirement + */ + private ArchivaConfiguration archivaConfiguration; + + private RepositoryArtifactIndex index; + + public boolean init( ArtifactRepository repository ) + { + Configuration configuration = archivaConfiguration.getConfiguration(); + + File indexPath = new File( configuration.getIndexPath() ); + + index = indexFactory.createStandardIndex( indexPath ); + + return super.init( repository ); + } + + public void processArtifact( Artifact artifact, BaseFile file ) + { + try + { + index.indexArtifact( artifact, recordFactory ); + } + catch ( RepositoryIndexException e ) + { + getLogger().warn( "Unable to index artifact " + artifact, e ); + } + } + + public void processFileProblem( BaseFile path, String message ) + { + + } + + public String getName() + { + return "Index Artifact Consumer"; + } +} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java new file mode 100644 index 000000000..d858d32cd --- /dev/null +++ b/archiva-core/src/main/java/org/apache/maven/archiva/consumers/RepositoryMetadataHealthConsumer.java @@ -0,0 +1,69 @@ +package org.apache.maven.archiva.consumers; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.consumers.GenericRepositoryMetadataConsumer; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase; +import org.apache.maven.archiva.reporting.group.ReportGroup; +import org.apache.maven.archiva.reporting.model.MetadataResults; +import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; + +/** + * RepositoryMetadataHealthConsumer + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumer" + * role-hint="metadata-health" + * instantiation-strategy="per-lookup" + */ +public class RepositoryMetadataHealthConsumer + extends GenericRepositoryMetadataConsumer +{ + /** + * @plexus.requirement + */ + private MetadataResultsDatabase database; + + /** + * @plexus.requirement role-hint="health" + */ + private ReportGroup health; + + public void processRepositoryMetadata( RepositoryMetadata metadata, BaseFile file ) + { + MetadataResults results = database.getMetadataResults( metadata ); + database.clearResults( results ); + + health.processMetadata( metadata, repository ); + } + + public void processFileProblem( BaseFile path, String message ) + { + + } + + public String getName() + { + return "RepositoryMetadata Health Consumer"; + } +} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java b/archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java deleted file mode 100644 index 7beb18a99..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/conversion/DefaultLegacyRepositoryConverter.java +++ /dev/null @@ -1,126 +0,0 @@ -package org.apache.maven.archiva.conversion; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import org.apache.maven.archiva.converter.RepositoryConversionException; -import org.apache.maven.archiva.converter.RepositoryConverter; -import org.apache.maven.archiva.discoverer.ArtifactDiscoverer; -import org.apache.maven.archiva.discoverer.DiscovererException; -import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter; -import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.archiva.reporting.store.ReportingStore; -import org.apache.maven.archiva.reporting.store.ReportingStoreException; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.repository.ArtifactRepositoryFactory; -import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; -import org.apache.maven.artifact.resolver.filter.ArtifactFilter; - -import java.io.File; -import java.net.MalformedURLException; -import java.util.List; - -/** - * @author Jason van Zyl - * @plexus.component - * @todo turn this into a general conversion component and hide all this crap here. - * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies - */ -public class DefaultLegacyRepositoryConverter - implements LegacyRepositoryConverter -{ - /** - * @plexus.requirement role-hint="legacy" - */ - private ArtifactDiscoverer artifactDiscoverer; - - /** - * @plexus.requirement role-hint="legacy" - */ - private ArtifactRepositoryLayout legacyLayout; - - /** - * @plexus.requirement role-hint="default" - */ - private ArtifactRepositoryLayout defaultLayout; - - /** - * @plexus.requirement - */ - private ArtifactRepositoryFactory artifactRepositoryFactory; - - /** - * @plexus.requirement - */ - private RepositoryConverter repositoryConverter; - - /** - * @plexus.requirement - */ - private ReportingStore reportingStore; - - /** - * @plexus.requirement role-hint="health" - */ - private ReportGroup reportGroup; - - public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, - List blacklistedPatterns, boolean includeSnapshots ) - throws RepositoryConversionException, DiscovererException - { - ArtifactRepository legacyRepository; - - ArtifactRepository repository; - - try - { - legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy", - legacyRepositoryDirectory.toURI().toURL().toString(), - legacyLayout, null, null ); - - repository = artifactRepositoryFactory.createArtifactRepository( "default", - repositoryDirectory.toURI().toURL().toString(), - defaultLayout, null, null ); - } - catch ( MalformedURLException e ) - { - throw new RepositoryConversionException( "Error convering legacy repository.", e ); - } - - ArtifactFilter filter = - includeSnapshots ? 
new AcceptAllArtifactFilter() : (ArtifactFilter) new SnapshotArtifactFilter(); - List legacyArtifacts = artifactDiscoverer.discoverArtifacts( legacyRepository, blacklistedPatterns, filter ); - - ReportingDatabase reporter; - try - { - reporter = reportingStore.getReportsFromStore( repository, reportGroup ); - - repositoryConverter.convert( legacyArtifacts, repository, reporter ); - - reportingStore.storeReports( reporter, repository ); - } - catch ( ReportingStoreException e ) - { - throw new RepositoryConversionException( "Error convering legacy repository.", e ); - } - } -} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java b/archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java deleted file mode 100644 index 876421475..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/conversion/LegacyRepositoryConverter.java +++ /dev/null @@ -1,47 +0,0 @@ -package org.apache.maven.archiva.conversion; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.archiva.converter.RepositoryConversionException; -import org.apache.maven.archiva.discoverer.DiscovererException; - -import java.io.File; -import java.util.List; - -/** - * @author Jason van Zyl - */ -public interface LegacyRepositoryConverter -{ - String ROLE = LegacyRepositoryConverter.class.getName(); - - /** - * Convert a legacy repository to a modern repository. This means a Maven 1.x repository - * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs. - * - * @param legacyRepositoryDirectory - * @param repositoryDirectory - * @throws org.apache.maven.archiva.converter.RepositoryConversionException - * - */ - void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, List blacklistedPatterns, - boolean includeSnapshots ) - throws RepositoryConversionException, DiscovererException; -} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java b/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java index 554fb3491..0bac31018 100644 --- a/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java +++ b/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java @@ -19,7 +19,7 @@ package org.apache.maven.archiva.repositories; * under the License. 
*/ -import org.apache.maven.archiva.artifact.ManagedArtifact; +import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact; import org.apache.maven.archiva.configuration.RepositoryConfiguration; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; @@ -31,7 +31,7 @@ import java.util.List; /** * ActiveManagedRepositories * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ */ public interface ActiveManagedRepositories @@ -44,12 +44,27 @@ public interface ActiveManagedRepositories * @param id the ID of the repository. * @return the ArtifactRepository associated with the provided ID, or null if none found. */ - ArtifactRepository getArtifactRepository( String id ); + public ArtifactRepository getArtifactRepository( String id ); - List getAllArtifactRepositories(); + /** + * Get the List of active managed repositories as a List of {@link ArtifactRepository} objects. + * + * @return the list of ArtifactRepository objects. + */ + public List /**/getAllArtifactRepositories(); RepositoryConfiguration getRepositoryConfiguration( String id ); + /** + * Providing only a groupId, artifactId, and version, return the MavenProject that + * is found, in any managed repository. + * + * @param groupId the groupId to search for + * @param artifactId the artifactId to search for + * @param version the version to search for + * @return the MavenProject from the provided parameters. + * @throws ProjectBuildingException if there was a problem building the maven project object. + */ MavenProject findProject( String groupId, String artifactId, String version ) throws ProjectBuildingException; @@ -59,4 +74,20 @@ public interface ActiveManagedRepositories ManagedArtifact findArtifact( String groupId, String artifactId, String version, String type ); ManagedArtifact findArtifact( Artifact artifact ); + + /** + * Obtain the last data refresh timestamp for all Managed Repositories. + * + * @return the last data refresh timestamp. + */ + long getLastDataRefreshTime(); + + /** + * Tests to see if there needs to be a data refresh performed. + * + * The only valid scenario is if 1 or more repositories have not had their data refreshed ever. + * + * @return true if there needs to be a data refresh. 
+ */ + boolean needsDataRefresh(); } diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java b/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java index 128a91d66..6b2504783 100644 --- a/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java +++ b/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java @@ -20,14 +20,15 @@ package org.apache.maven.archiva.repositories; */ import org.apache.commons.lang.StringUtils; -import org.apache.maven.archiva.artifact.ManagedArtifact; -import org.apache.maven.archiva.artifact.ManagedArtifactTypes; -import org.apache.maven.archiva.artifact.ManagedEjbArtifact; -import org.apache.maven.archiva.artifact.ManagedJavaArtifact; +import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact; +import org.apache.maven.archiva.common.artifact.managed.ManagedArtifactTypes; +import org.apache.maven.archiva.common.artifact.managed.ManagedEjbArtifact; +import org.apache.maven.archiva.common.artifact.managed.ManagedJavaArtifact; import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.Configuration; import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory; import org.apache.maven.archiva.configuration.RepositoryConfiguration; +import org.apache.maven.archiva.discoverer.DiscovererStatistics; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.factory.ArtifactFactory; import org.apache.maven.artifact.repository.ArtifactRepository; @@ -49,7 +50,7 @@ import java.util.List; /** * DefaultActiveManagedRepositories * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ * @plexus.component role="org.apache.maven.archiva.repositories.ActiveManagedRepositories" */ @@ -224,6 +225,7 @@ public class DefaultActiveManagedRepositories repositories = repositoryFactory.createRepositories( this.configuration ); localRepository = repositoryFactory.createLocalRepository( this.configuration ); + } private ManagedArtifact createManagedArtifact( ArtifactRepository repository, Artifact artifact, File f ) @@ -283,8 +285,9 @@ public class DefaultActiveManagedRepositories { if ( propertyName.startsWith( "repositories" ) || propertyName.startsWith( "localRepository" ) ) { - getLogger().debug( "Triggering managed repository configuration change with " + propertyName + " set to " + - propertyValue ); + getLogger().debug( + "Triggering managed repository configuration change with " + propertyName + " set to " + + propertyValue ); configureSelf( archivaConfiguration.getConfiguration() ); } else @@ -292,4 +295,39 @@ public class DefaultActiveManagedRepositories getLogger().debug( "Not triggering managed repository configuration change with " + propertyName ); } } + + public long getLastDataRefreshTime() + { + long lastDataRefreshTime = 0; + + for ( Iterator i = getAllArtifactRepositories().iterator(); i.hasNext(); ) + { + ArtifactRepository repository = (ArtifactRepository) i.next(); + + DiscovererStatistics stats = new DiscovererStatistics( repository ); + if ( stats.getTimestampFinished() > lastDataRefreshTime ) + { + lastDataRefreshTime = stats.getTimestampFinished(); + } + } + + return lastDataRefreshTime; + } + + public boolean needsDataRefresh() + { + for ( Iterator i = getAllArtifactRepositories().iterator(); i.hasNext(); ) + { + ArtifactRepository repository = 
(ArtifactRepository) i.next(); + + DiscovererStatistics stats = new DiscovererStatistics( repository ); + if ( stats.getTimestampFinished() <= 0 ) + { + // Found a repository that has NEVER had it's data walked. + return true; + } + } + + return false; + } } diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java index 04ab088d4..bae351750 100644 --- a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java +++ b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java @@ -21,11 +21,8 @@ package org.apache.maven.archiva.scheduler; import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.Configuration; -import org.apache.maven.archiva.indexer.RepositoryArtifactIndex; -import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory; -import org.apache.maven.archiva.indexer.RepositoryIndexException; -import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor; -import org.apache.maven.archiva.scheduler.task.IndexerTask; +import org.apache.maven.archiva.repositories.ActiveManagedRepositories; +import org.apache.maven.archiva.scheduler.task.DataRefreshTask; import org.apache.maven.archiva.scheduler.task.RepositoryTask; import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable; @@ -42,7 +39,6 @@ import org.quartz.JobDataMap; import org.quartz.JobDetail; import org.quartz.SchedulerException; -import java.io.File; import java.text.ParseException; /** @@ -61,28 +57,23 @@ public class DefaultRepositoryTaskScheduler private Scheduler scheduler; /** - * @plexus.requirement role-hint="indexer" + * @plexus.requirement role-hint="data-refresh" */ - private TaskQueue indexerQueue; - - /** - * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer" - */ - private IndexerTaskExecutor indexerTaskExecutor; + private TaskQueue datarefreshQueue; /** * @plexus.requirement */ private ArchivaConfiguration archivaConfiguration; - + /** * @plexus.requirement */ - private RepositoryArtifactIndexFactory indexFactory; + private ActiveManagedRepositories activeRepositories; private static final String DISCOVERER_GROUP = "DISCOVERER"; - private static final String INDEXER_JOB = "indexerTask"; + private static final String DATA_REFRESH_JOB = "dataRefreshTask"; public void start() throws StartingException @@ -92,11 +83,11 @@ public class DefaultRepositoryTaskScheduler try { - scheduleJobs( configuration.getIndexPath(), configuration.getIndexerCronExpression() ); + scheduleJobs( configuration.getDataRefreshCronExpression() ); } catch ( ParseException e ) { - throw new StartingException( "Invalid configuration: " + configuration.getIndexerCronExpression(), e ); + throw new StartingException( "Invalid configuration: " + configuration.getDataRefreshCronExpression(), e ); } catch ( SchedulerException e ) { @@ -104,29 +95,22 @@ public class DefaultRepositoryTaskScheduler } } - private void scheduleJobs( String indexPath, String indexerCronExpression ) + private void scheduleJobs( String indexerCronExpression ) throws ParseException, SchedulerException { - if ( indexPath != null ) - { - JobDetail jobDetail = createJobDetail( INDEXER_JOB ); + JobDetail jobDetail = createJobDetail( DATA_REFRESH_JOB ); - 
getLogger().info( "Scheduling indexer: " + indexerCronExpression ); - CronTrigger trigger = new CronTrigger( INDEXER_JOB + "Trigger", DISCOVERER_GROUP, indexerCronExpression ); - scheduler.scheduleJob( jobDetail, trigger ); + getLogger().info( "Scheduling data-refresh: " + indexerCronExpression ); + CronTrigger trigger = new CronTrigger( DATA_REFRESH_JOB + "Trigger", DISCOVERER_GROUP, indexerCronExpression ); + scheduler.scheduleJob( jobDetail, trigger ); - try - { - queueNowIfNeeded(); - } - catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e ) - { - getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e ); - } + try + { + queueNowIfNeeded(); } - else + catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e ) { - getLogger().info( "Not scheduling indexer - index path is not configured" ); + getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e ); } } @@ -135,7 +119,7 @@ public class DefaultRepositoryTaskScheduler JobDetail jobDetail = new JobDetail( jobName, DISCOVERER_GROUP, RepositoryTaskJob.class ); JobDataMap dataMap = new JobDataMap(); - dataMap.put( RepositoryTaskJob.TASK_QUEUE, indexerQueue ); + dataMap.put( RepositoryTaskJob.TASK_QUEUE, datarefreshQueue ); dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, RepositoryTask.QUEUE_POLICY_SKIP ); jobDetail.setJobDataMap( dataMap ); @@ -147,7 +131,7 @@ public class DefaultRepositoryTaskScheduler { try { - scheduler.unscheduleJob( INDEXER_JOB, DISCOVERER_GROUP ); + scheduler.unscheduleJob( DATA_REFRESH_JOB, DISCOVERER_GROUP ); } catch ( SchedulerException e ) { @@ -163,7 +147,7 @@ public class DefaultRepositoryTaskScheduler public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue ) { - if ( "indexPath".equals( propertyName ) || "indexerCronExpression".equals( propertyName ) ) + if ( "dataRefreshCronExpression".equals( propertyName ) ) { getLogger().debug( "Restarting task scheduler with new configuration after property change: " + propertyName + " to " + propertyValue ); @@ -179,7 +163,7 @@ public class DefaultRepositoryTaskScheduler try { Configuration configuration = archivaConfiguration.getConfiguration(); - scheduleJobs( configuration.getIndexPath(), configuration.getIndexerCronExpression() ); + scheduleJobs( configuration.getDataRefreshCronExpression() ); } catch ( ParseException e ) { @@ -199,43 +183,27 @@ public class DefaultRepositoryTaskScheduler } } - public void runIndexer() - throws org.apache.maven.archiva.scheduler.TaskExecutionException + public void runDataRefresh() + throws TaskExecutionException { - IndexerTask task = new IndexerTask(); - task.setJobName( "INDEX_INIT" ); + DataRefreshTask task = new DataRefreshTask(); + task.setJobName( "DATA_REFRESH_INIT" ); try { - indexerQueue.put( task ); + datarefreshQueue.put( task ); } catch ( TaskQueueException e ) { - throw new org.apache.maven.archiva.scheduler.TaskExecutionException( e.getMessage(), e ); + throw new TaskExecutionException( e.getMessage(), e ); } } public void queueNowIfNeeded() - throws org.codehaus.plexus.taskqueue.execution.TaskExecutionException + throws TaskExecutionException { - Configuration configuration = archivaConfiguration.getConfiguration(); - - File indexPath = new File( configuration.getIndexPath() ); - - try + if ( activeRepositories.needsDataRefresh() ) { - RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath ); - if ( !artifactIndex.exists() ) - { - 
runIndexer(); - } - } - catch ( RepositoryIndexException e ) - { - throw new TaskExecutionException( e.getMessage(), e ); - } - catch ( org.apache.maven.archiva.scheduler.TaskExecutionException e ) - { - throw new TaskExecutionException( e.getMessage(), e ); + runDataRefresh(); } } diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java index dc3c26ace..06152f244 100644 --- a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java +++ b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java @@ -19,7 +19,7 @@ package org.apache.maven.archiva.scheduler; * under the License. */ -import org.apache.maven.archiva.scheduler.task.IndexerTask; +import org.apache.maven.archiva.scheduler.task.DataRefreshTask; import org.apache.maven.archiva.scheduler.task.RepositoryTask; import org.codehaus.plexus.scheduler.AbstractJob; import org.codehaus.plexus.taskqueue.TaskQueue; @@ -53,27 +53,27 @@ public class RepositoryTaskJob JobDataMap dataMap = context.getJobDetail().getJobDataMap(); setJobDataMap( dataMap ); - TaskQueue indexerQueue = (TaskQueue) dataMap.get( TASK_QUEUE ); + TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE ); String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString(); - RepositoryTask task = new IndexerTask(); + RepositoryTask task = new DataRefreshTask(); task.setJobName( context.getJobDetail().getName() ); try { - if ( indexerQueue.getQueueSnapshot().size() == 0 ) + if ( taskQueue.getQueueSnapshot().size() == 0 ) { - indexerQueue.put( task ); + taskQueue.put( task ); } else { if ( RepositoryTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) ) { - indexerQueue.put( task ); + taskQueue.put( task ); } else if ( RepositoryTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) ) { - //do not queue anymore, policy is to skip + // do not queue anymore, policy is to skip } } } diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskScheduler.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskScheduler.java index c63556ff5..08e511f3e 100644 --- a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskScheduler.java +++ b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskScheduler.java @@ -19,6 +19,8 @@ package org.apache.maven.archiva.scheduler; * under the License. */ +import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; + /** * The component that takes care of scheduling in the application. * @@ -31,7 +33,7 @@ public interface RepositoryTaskScheduler */ String ROLE = RepositoryTaskScheduler.class.getName(); - void runIndexer() + void runDataRefresh() throws TaskExecutionException; -} +} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/TaskExecutionException.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/TaskExecutionException.java deleted file mode 100644 index 51643445a..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/TaskExecutionException.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.apache.maven.archiva.scheduler; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Exception occurring during task execution. - * - * @author Brett Porter - */ -public class TaskExecutionException - extends Exception -{ - public TaskExecutionException( String message, Throwable t ) - { - super( message, t ); - } -} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshConsumers.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshConsumers.java new file mode 100644 index 000000000..783a44c27 --- /dev/null +++ b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshConsumers.java @@ -0,0 +1,61 @@ +package org.apache.maven.archiva.scheduler.executors; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +/** + * Mutable list of consumer for the Data Refresh. + * + * NOTE: This class only exists to minimize the requirements of manual component management. 
+ * This approach allows for a small and simple component definition in the application.xml + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers" + */ +public class DataRefreshConsumers +{ + /** + * @plexus.configuration + */ + private List consumerNames; + + public List getConsumerNames() + { + if ( consumerNames == null ) + { + consumerNames = new ArrayList(); + consumerNames.add( "index-artifact" ); + consumerNames.add( "artifact-health" ); + consumerNames.add( "metadata-health" ); + } + + return consumerNames; + } + + public Iterator iterator() + { + return getConsumerNames().iterator(); + } +} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java new file mode 100644 index 000000000..939277235 --- /dev/null +++ b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutor.java @@ -0,0 +1,192 @@ +package org.apache.maven.archiva.scheduler.executors; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.consumers.Consumer; +import org.apache.maven.archiva.common.consumers.ConsumerException; +import org.apache.maven.archiva.common.consumers.ConsumerFactory; +import org.apache.maven.archiva.configuration.ArchivaConfiguration; +import org.apache.maven.archiva.configuration.Configuration; +import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory; +import org.apache.maven.archiva.configuration.RepositoryConfiguration; +import org.apache.maven.archiva.discoverer.Discoverer; +import org.apache.maven.archiva.discoverer.DiscovererException; +import org.apache.maven.archiva.discoverer.DiscovererStatistics; +import org.apache.maven.archiva.scheduler.task.DataRefreshTask; +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.codehaus.plexus.logging.AbstractLogEnabled; +import org.codehaus.plexus.taskqueue.Task; +import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; +import org.codehaus.plexus.taskqueue.execution.TaskExecutor; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +/** + * DataRefreshExecutor + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" + * role-hint="data-refresh" + */ +public class DataRefreshExecutor + extends AbstractLogEnabled + implements TaskExecutor +{ + /** + * Configuration store. 
+ * + * @plexus.requirement + */ + private ArchivaConfiguration archivaConfiguration; + + /** + * @plexus.requirement + */ + private ConfiguredRepositoryFactory repoFactory; + + /** + * @plexus.requirement + */ + private DataRefreshConsumers consumerNames; + + /** + * @plexus.requirement + */ + private Discoverer discoverer; + + /** + * @plexus.requirement + */ + private ConsumerFactory consumerFactory; + + public void executeTask( Task task ) + throws TaskExecutionException + { + DataRefreshTask indexerTask = (DataRefreshTask) task; + + getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() ); + + execute(); + } + + public void execute() + throws TaskExecutionException + { + Configuration configuration = archivaConfiguration.getConfiguration(); + + List consumers = new ArrayList(); + + for ( Iterator it = consumerNames.iterator(); it.hasNext(); ) + { + String name = (String) it.next(); + try + { + Consumer consumer = consumerFactory.createConsumer( name ); + consumers.add( consumer ); + } + catch ( ConsumerException e ) + { + getLogger().warn( e.getMessage(), e ); + throw new TaskExecutionException( e.getMessage(), e ); + } + } + + long time = System.currentTimeMillis(); + + for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); ) + { + RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next(); + + if ( !repositoryConfiguration.isIndexed() ) + { + continue; + } + + ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration ); + + List filteredConsumers = filterConsumers( consumers, repository ); + + DiscovererStatistics lastRunStats = new DiscovererStatistics( repository ); + try + { + lastRunStats.load( ".datarefresh" ); + } + catch ( IOException e ) + { + getLogger().info( + "Unable to load last run statistics for repository [" + repository.getId() + "]: " + + e.getMessage() ); + } + + try + { + DiscovererStatistics stats = discoverer + .walkRepository( repository, filteredConsumers, repositoryConfiguration.isIncludeSnapshots(), + lastRunStats.getTimestampFinished(), null, null ); + + stats.dump( getLogger() ); + } + catch ( DiscovererException e ) + { + getLogger().error( + "Unable to run data refresh against repository [" + repository.getId() + "]: " + + e.getMessage(), e ); + } + } + + time = System.currentTimeMillis() - time; + + getLogger().info( "Finished data refresh process in " + time + "ms." ); + } + + /** + * Not all consumers work with all repositories. + * This will filter out those incompatible consumers based on the provided repository. + * + * @param consumers the initial list of consumers. + * @param repository the repository to test consumer against. + * @return the filtered list of consumers. + */ + private List filterConsumers( List consumers, ArtifactRepository repository ) + { + List filtered = new ArrayList(); + + for ( Iterator it = consumers.iterator(); it.hasNext(); ) + { + Consumer consumer = (Consumer) it.next(); + if ( consumer.init( repository ) ) + { + // Approved! 
+ filtered.add( consumer ); + } + else + { + getLogger().info( "Disabling consumer [" + consumer.getName() + "] for repository " + repository ); + } + } + + return filtered; + } +} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java deleted file mode 100644 index 25d390592..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutor.java +++ /dev/null @@ -1,317 +0,0 @@ -package org.apache.maven.archiva.scheduler.executors; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.archiva.configuration.ArchivaConfiguration; -import org.apache.maven.archiva.configuration.Configuration; -import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory; -import org.apache.maven.archiva.configuration.RepositoryConfiguration; -import org.apache.maven.archiva.discoverer.ArtifactDiscoverer; -import org.apache.maven.archiva.discoverer.DiscovererException; -import org.apache.maven.archiva.discoverer.MetadataDiscoverer; -import org.apache.maven.archiva.discoverer.filter.MetadataFilter; -import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter; -import org.apache.maven.archiva.indexer.RepositoryArtifactIndex; -import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory; -import org.apache.maven.archiva.indexer.RepositoryIndexException; -import org.apache.maven.archiva.indexer.record.IndexRecordExistsArtifactFilter; -import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.archiva.reporting.executor.ReportExecutor; -import org.apache.maven.archiva.reporting.filter.ReportingMetadataFilter; -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.archiva.reporting.store.ReportingStoreException; -import org.apache.maven.archiva.scheduler.task.IndexerTask; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.resolver.filter.AndArtifactFilter; -import org.apache.maven.project.MavenProjectBuilder; -import org.codehaus.plexus.logging.AbstractLogEnabled; -import org.codehaus.plexus.taskqueue.Task; -import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; -import org.codehaus.plexus.taskqueue.execution.TaskExecutor; - -import java.io.File; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -/** - * @author Edwin Punzalan - * @plexus.component 
role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer" - */ -public class IndexerTaskExecutor - extends AbstractLogEnabled - implements TaskExecutor -{ - /** - * Configuration store. - * - * @plexus.requirement - */ - private ArchivaConfiguration archivaConfiguration; - - /** - * @plexus.requirement - */ - private RepositoryArtifactIndexFactory indexFactory; - - /** - * @plexus.requirement - */ - private ConfiguredRepositoryFactory repoFactory; - - /** - * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" - */ - private Map artifactDiscoverers; - - /** - * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer" - */ - private Map metadataDiscoverers; - - /** - * @plexus.requirement role-hint="standard" - */ - private RepositoryIndexRecordFactory recordFactory; - - /** - * @plexus.requirement - */ - private ReportExecutor reportExecutor; - - /** - * @plexus.requirement role-hint="health" - */ - private ReportGroup reportGroup; - - private long lastIndexingTime = 0; - - private static final int ARTIFACT_BUFFER_SIZE = 1000; - - public long getLastIndexingTime() - { - return lastIndexingTime; - } - - public void executeTask( Task task ) - throws TaskExecutionException - { - IndexerTask indexerTask = (IndexerTask) task; - - getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() ); - - execute(); - } - - public void execute() - throws TaskExecutionException - { - Configuration configuration = archivaConfiguration.getConfiguration(); - - File indexPath = new File( configuration.getIndexPath() ); - - execute( configuration, indexPath ); - } - - public void executeNowIfNeeded() - throws TaskExecutionException - { - Configuration configuration = archivaConfiguration.getConfiguration(); - - File indexPath = new File( configuration.getIndexPath() ); - - try - { - RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath ); - if ( !artifactIndex.exists() ) - { - execute( configuration, indexPath ); - } - } - catch ( RepositoryIndexException e ) - { - throw new TaskExecutionException( e.getMessage(), e ); - } - } - - private void execute( Configuration configuration, File indexPath ) - throws TaskExecutionException - { - long time = System.currentTimeMillis(); - getLogger().info( "Starting repository indexing process" ); - - RepositoryArtifactIndex index = indexFactory.createStandardIndex( indexPath ); - - try - { - Collection keys; - if ( index.exists() ) - { - keys = index.getAllRecordKeys(); - } - else - { - keys = Collections.EMPTY_LIST; - } - - for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); ) - { - RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next(); - - if ( repositoryConfiguration.isIndexed() ) - { - List blacklistedPatterns = new ArrayList(); - if ( repositoryConfiguration.getBlackListPatterns() != null ) - { - blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() ); - } - if ( configuration.getGlobalBlackListPatterns() != null ) - { - blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() ); - } - boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots(); - - ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration ); - ReportingDatabase reporter = reportExecutor.getReportDatabase( repository, reportGroup ); - - // keep original value in case there is another process under way - long origStartTime = 
reporter.getStartTime(); - reporter.setStartTime( System.currentTimeMillis() ); - - // Discovery process - String layoutProperty = repositoryConfiguration.getLayout(); - ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty ); - AndArtifactFilter filter = new AndArtifactFilter(); - filter.add( new IndexRecordExistsArtifactFilter( keys ) ); - if ( !includeSnapshots ) - { - filter.add( new SnapshotArtifactFilter() ); - } - - // Save some memory by not tracking paths we won't use - // TODO: Plexus CDC should be able to inject this configuration - discoverer.setTrackOmittedPaths( false ); - - getLogger().info( "Searching repository " + repositoryConfiguration.getName() ); - List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter ); - - if ( !artifacts.isEmpty() ) - { - getLogger().info( "Discovered " + artifacts.size() + " unindexed artifacts" ); - - // Work through these in batches, then flush the project cache. - for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE ) - { - int end = j + ARTIFACT_BUFFER_SIZE; - List currentArtifacts = - artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end ); - - // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal) - - // run the reports. Done intermittently to avoid losing track of what is indexed since - // that is what the filter is based on. - reportExecutor.runArtifactReports( reportGroup, currentArtifacts, repository ); - - index.indexArtifacts( currentArtifacts, recordFactory ); - - // MRM-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack - // around that. TODO: remove when it is configurable - flushProjectBuilderCacheHack(); - } - } - - MetadataFilter metadataFilter = new ReportingMetadataFilter( reporter ); - - MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers - .get( layoutProperty ); - List metadata = - metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, metadataFilter ); - - if ( !metadata.isEmpty() ) - { - getLogger().info( "Discovered " + metadata.size() + " unprocessed metadata files" ); - - // run the reports - reportExecutor.runMetadataReports( reportGroup, metadata, repository ); - } - - reporter.setStartTime( origStartTime ); - } - } - } - catch ( RepositoryIndexException e ) - { - throw new TaskExecutionException( e.getMessage(), e ); - } - catch ( DiscovererException e ) - { - throw new TaskExecutionException( e.getMessage(), e ); - } - catch ( ReportingStoreException e ) - { - throw new TaskExecutionException( e.getMessage(), e ); - } - - time = System.currentTimeMillis() - time; - lastIndexingTime = System.currentTimeMillis(); - getLogger().info( "Finished repository indexing process in " + time + "ms" ); - } - - /** - * @todo remove when no longer needed (MRM-142) - * @plexus.requirement - */ - private MavenProjectBuilder projectBuilder; - - private void flushProjectBuilderCacheHack() - { - try - { - if ( projectBuilder != null ) - { - getLogger().info( "projectBuilder is type " + projectBuilder.getClass().getName() ); - - java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" ); - f.setAccessible( true ); - Map cache = (Map) f.get( projectBuilder ); - getLogger().info( "projectBuilder.raw is type " + cache.getClass().getName() ); - cache.clear(); - - f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" ); - f.setAccessible( true ); 
- cache = (Map) f.get( projectBuilder ); - getLogger().info( "projectBuilder.processed is type " + cache.getClass().getName() ); - cache.clear(); - } - } - catch ( NoSuchFieldException e ) - { - throw new RuntimeException( e ); - } - catch ( IllegalAccessException e ) - { - throw new RuntimeException( e ); - } - } -} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java new file mode 100644 index 000000000..57d4b683e --- /dev/null +++ b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/DataRefreshTask.java @@ -0,0 +1,41 @@ +package org.apache.maven.archiva.scheduler.task; + +/** + * DataRefreshTask - task for discovering changes in the repository + * and updating all associated data. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class DataRefreshTask + implements RepositoryTask +{ + private String jobName; + + private String policy; + + public String getJobName() + { + return jobName; + } + + public String getQueuePolicy() + { + return policy; + } + + public void setJobName( String jobName ) + { + this.jobName = jobName; + } + + public void setQueuePolicy( String policy ) + { + this.policy = policy; + } + + public long getMaxExecutionTime() + { + return 0; + } +} diff --git a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java b/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java deleted file mode 100644 index a4cd2f612..000000000 --- a/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/task/IndexerTask.java +++ /dev/null @@ -1,60 +0,0 @@ -package org.apache.maven.archiva.scheduler.task; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Task for discovering changes in the repository and updating the index accordingly. 
- * - * @author Brett Porter - */ -public class IndexerTask - implements RepositoryTask -{ - private String jobName; - - private String policy; - - public long getMaxExecutionTime() - { - return 0; - } - - public String getJobName() - { - return jobName; - } - - public String getQueuePolicy() - { - return policy; - } - - public void setQueuePolicy( String policy ) - { - this.policy = policy; - } - - public void setJobName( String jobName ) - { - this.jobName = jobName; - } - - -} diff --git a/archiva-core/src/main/resources/META-INF/plexus/components.xml b/archiva-core/src/main/resources/META-INF/plexus/components.xml index 1fe09e4ff..cbf418cb4 100644 --- a/archiva-core/src/main/resources/META-INF/plexus/components.xml +++ b/archiva-core/src/main/resources/META-INF/plexus/components.xml @@ -21,9 +21,67 @@ + + + + + + org.apache.maven.archiva.common.consumers.Consumer + index-artifact + org.apache.maven.archiva.consumers.IndexArtifactConsumer + per-lookup + + + org.apache.maven.artifact.factory.ArtifactFactory + artifactFactory + + + + + + org.apache.maven.archiva.common.consumers.Consumer + artifact-health + org.apache.maven.archiva.consumers.ArtifactHealthConsumer + per-lookup + + + org.apache.maven.artifact.factory.ArtifactFactory + artifactFactory + + + + + + org.apache.maven.archiva.common.consumers.Consumer + metadata-health + org.apache.maven.archiva.consumers.RepositoryMetadataHealthConsumer + per-lookup + + + org.apache.maven.artifact.factory.ArtifactFactory + artifactFactory + + + + + + + + org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers + org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers + Mutable list of consumer for the Data Refresh. + + + index-artifact + artifact-health + metadata-health + + + + org.codehaus.plexus.taskqueue.TaskQueue - indexer + data-refresh org.codehaus.plexus.taskqueue.DefaultTaskQueue plexus-configurable @@ -38,20 +96,20 @@ org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor - indexer + data-refresh org.codehaus.plexus.taskqueue.execution.ThreadedTaskQueueExecutor org.codehaus.plexus.taskqueue.execution.TaskExecutor - indexer + data-refresh org.codehaus.plexus.taskqueue.TaskQueue - indexer + data-refresh - indexer + data-refresh diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/AllTests.java b/archiva-core/src/test/java/org/apache/maven/archiva/AllTests.java new file mode 100644 index 000000000..e037e09c3 --- /dev/null +++ b/archiva-core/src/test/java/org/apache/maven/archiva/AllTests.java @@ -0,0 +1,44 @@ +package org.apache.maven.archiva; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests - added to allow IDE users to pull all tests into their tool. 
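The components.xml changes above register the three consumers (index-artifact, artifact-health, metadata-health) and rename the task queue and executor from indexer to data-refresh. As a hedged illustration of how such a component is addressed by role and role-hint (the explicit container lookup shown here is an assumption; Archiva itself mostly relies on @plexus.requirement injection):

import org.codehaus.plexus.PlexusContainer;

public class ConsumerLookupExample
{
    public Object lookupIndexArtifactConsumer( PlexusContainer container )
        throws Exception
    {
        // Role and role-hint match the <component> declaration above.
        return container.lookup( "org.apache.maven.archiva.common.consumers.Consumer", "index-artifact" );
    }
}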
+ * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva" ); + //$JUnit-BEGIN$ + suite.addTest( org.apache.maven.archiva.repositories.AllTests.suite() ); + suite.addTest( org.apache.maven.archiva.scheduler.executors.AllTests.suite() ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/LegacyRepositoryConverterTest.java b/archiva-core/src/test/java/org/apache/maven/archiva/LegacyRepositoryConverterTest.java deleted file mode 100644 index bd13a7deb..000000000 --- a/archiva-core/src/test/java/org/apache/maven/archiva/LegacyRepositoryConverterTest.java +++ /dev/null @@ -1,44 +0,0 @@ -package org.apache.maven.archiva; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.archiva.conversion.LegacyRepositoryConverter; -import org.codehaus.plexus.PlexusTestCase; - -import java.io.File; - -/** - * @author Jason van Zyl - */ -public class LegacyRepositoryConverterTest - extends PlexusTestCase -{ - public void testLegacyRepositoryConversion() - throws Exception - { - File legacyRepositoryDirectory = getTestFile( "src/test/maven-1.x-repository" ); - - File repositoryDirectory = getTestFile( "target/maven-2.x-repository" ); - - LegacyRepositoryConverter rm = (LegacyRepositoryConverter) lookup( LegacyRepositoryConverter.ROLE ); - - rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, null, true ); - } -} diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/repositories/AllTests.java b/archiva-core/src/test/java/org/apache/maven/archiva/repositories/AllTests.java new file mode 100644 index 000000000..0b82640b9 --- /dev/null +++ b/archiva-core/src/test/java/org/apache/maven/archiva/repositories/AllTests.java @@ -0,0 +1,43 @@ +package org.apache.maven.archiva.repositories; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.repositories" ); + //$JUnit-BEGIN$ + suite.addTestSuite( DefaultActiveManagedRepositoriesTest.class ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositoriesTest.java b/archiva-core/src/test/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositoriesTest.java index a535307e2..f4e89eadf 100644 --- a/archiva-core/src/test/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositoriesTest.java +++ b/archiva-core/src/test/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositoriesTest.java @@ -19,15 +19,15 @@ package org.apache.maven.archiva.repositories; * under the License. */ -import org.apache.maven.archiva.artifact.ManagedArtifact; -import org.apache.maven.archiva.artifact.ManagedEjbArtifact; -import org.apache.maven.archiva.artifact.ManagedJavaArtifact; +import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact; +import org.apache.maven.archiva.common.artifact.managed.ManagedEjbArtifact; +import org.apache.maven.archiva.common.artifact.managed.ManagedJavaArtifact; import org.codehaus.plexus.PlexusTestCase; /** * DefaultActiveManagedRepositoriesTest * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ */ public class DefaultActiveManagedRepositoriesTest diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/AllTests.java b/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/AllTests.java new file mode 100644 index 000000000..9fdfcc15b --- /dev/null +++ b/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/AllTests.java @@ -0,0 +1,43 @@ +package org.apache.maven.archiva.scheduler.executors; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.scheduler.executors" ); + //$JUnit-BEGIN$ + suite.addTestSuite( DataRefreshExecutorTest.class ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java b/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java new file mode 100644 index 000000000..ad9900795 --- /dev/null +++ b/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.java @@ -0,0 +1,75 @@ +package org.apache.maven.archiva.scheduler.executors; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.commons.io.FileUtils; +import org.apache.maven.archiva.configuration.ArchivaConfiguration; +import org.apache.maven.archiva.configuration.Configuration; +import org.apache.maven.archiva.scheduler.task.DataRefreshTask; +import org.codehaus.plexus.PlexusTestCase; +import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; +import org.codehaus.plexus.taskqueue.execution.TaskExecutor; + +import java.io.File; + +/** + * IndexerTaskExecutorTest + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class DataRefreshExecutorTest + extends PlexusTestCase +{ + private TaskExecutor taskExecutor; + + protected void setUp() + throws Exception + { + super.setUp(); + + taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "data-refresh" ); + + ArchivaConfiguration archivaConfiguration = + (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() ); + Configuration configuration = archivaConfiguration.getConfiguration(); + + File indexPath = new File( configuration.getIndexPath() ); + if ( indexPath.exists() ) + { + FileUtils.deleteDirectory( indexPath ); + } + } + + public void testExecutor() + throws TaskExecutionException + { + taskExecutor.executeTask( new TestDataRefreshTask() ); + } + + class TestDataRefreshTask + extends DataRefreshTask + { + public String getJobName() + { + return "TestDataRefresh"; + } + } +} diff --git a/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java b/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java deleted file mode 100644 index 8729e0ccb..000000000 --- a/archiva-core/src/test/java/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.java +++ /dev/null @@ -1,75 +0,0 @@ -package 
org.apache.maven.archiva.scheduler.executors; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.commons.io.FileUtils; -import org.apache.maven.archiva.configuration.ArchivaConfiguration; -import org.apache.maven.archiva.configuration.Configuration; -import org.apache.maven.archiva.scheduler.task.IndexerTask; -import org.codehaus.plexus.PlexusTestCase; -import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; -import org.codehaus.plexus.taskqueue.execution.TaskExecutor; - -import java.io.File; - -/** - * IndexerTaskExecutorTest - * - * @author Joakim Erdfelt - * @version $Id$ - */ -public class IndexerTaskExecutorTest - extends PlexusTestCase -{ - private TaskExecutor taskExecutor; - - protected void setUp() - throws Exception - { - super.setUp(); - - taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "indexer" ); - - ArchivaConfiguration archivaConfiguration = - (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() ); - Configuration configuration = archivaConfiguration.getConfiguration(); - - File indexPath = new File( configuration.getIndexPath() ); - if ( indexPath.exists() ) - { - FileUtils.deleteDirectory( indexPath ); - } - } - - public void testIndexer() - throws TaskExecutionException - { - taskExecutor.executeTask( new TestIndexerTask() ); - } - - class TestIndexerTask - extends IndexerTask - { - public String getJobName() - { - return "TestIndexer"; - } - } -} diff --git a/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.xml b/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.xml new file mode 100644 index 000000000..5a7d8101b --- /dev/null +++ b/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/DataRefreshExecutorTest.xml @@ -0,0 +1,90 @@ + + + + + + + org.codehaus.plexus.registry.Registry + org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry + commons-configuration + + + + + + + + + org.codehaus.plexus.jdo.JdoFactory + archiva + org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory + + + + org.hsqldb.jdbcDriver + jdbc:hsqldb:mem:TESTDB + sa + + + + org.jpox.PersistenceManagerFactoryImpl + + + javax.jdo.PersistenceManagerFactoryClass + org.jpox.PersistenceManagerFactoryImpl + + + org.jpox.autoCreateSchema + true + + + org.jpox.validateTables + false + + + org.jpox.validateConstraints + false + + + org.jpox.validateColumns + false + + + org.jpox.autoStartMechanism + None + + + org.jpox.transactionIsolation + READ_UNCOMMITTED + + + org.jpox.poid.transactionIsolation + READ_UNCOMMITTED + + + org.jpox.rdbms.dateTimezone + JDK_DEFAULT_TIMEZONE + + + + + + diff --git 
a/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.xml b/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.xml deleted file mode 100644 index 34d15023a..000000000 --- a/archiva-core/src/test/resources/org/apache/maven/archiva/scheduler/executors/IndexerTaskExecutorTest.xml +++ /dev/null @@ -1,35 +0,0 @@ - - - - - - - org.codehaus.plexus.registry.Registry - org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry - commons-configuration - - - - - - - - diff --git a/archiva-discoverer/pom.xml b/archiva-discoverer/pom.xml index 90ccb9fac..8a96e39c5 100755 --- a/archiva-discoverer/pom.xml +++ b/archiva-discoverer/pom.xml @@ -27,8 +27,12 @@ 4.0.0 archiva-discoverer - Archiva Artifact Discoverer + Archiva Discoverer + + org.apache.maven.archiva + archiva-common + org.codehaus.plexus plexus-utils diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java deleted file mode 100644 index f3002ec53..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscoverer.java +++ /dev/null @@ -1,117 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.resolver.filter.ArtifactFilter; - -import java.io.File; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; - -/** - * Base class for artifact discoverers. - * - * @author John Casey - * @author Brett Porter - */ -public abstract class AbstractArtifactDiscoverer - extends AbstractDiscoverer - implements ArtifactDiscoverer -{ - /** - * Standard patterns to exclude from discovery as they are not artifacts. 
- */ - private static final String[] STANDARD_DISCOVERY_EXCLUDES = {"bin/**", "reports/**", ".index", ".reports/**", - ".maven/**", "**/*.md5", "**/*.MD5", "**/*.sha1", "**/*.SHA1", "**/*snapshot-version", "*/website/**", - "*/licenses/**", "*/licences/**", "**/.htaccess", "**/*.html", "**/*.asc", "**/*.txt", "**/*.xml", "**/README*", - "**/CHANGELOG*", "**/KEYS*"}; - - private List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns ) - { - return scanForArtifactPaths( repositoryBase, blacklistedPatterns, null, STANDARD_DISCOVERY_EXCLUDES ); - } - - public List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter ) - throws DiscovererException - { - if ( !"file".equals( repository.getProtocol() ) ) - { - throw new UnsupportedOperationException( "Only filesystem repositories are supported" ); - } - - File repositoryBase = new File( repository.getBasedir() ); - - List artifacts = new ArrayList(); - - if ( repositoryBase.exists() ) - { - List artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns ); - - for ( Iterator i = artifactPaths.iterator(); i.hasNext(); ) - { - String path = (String) i.next(); - - try - { - Artifact artifact = buildArtifactFromPath( path, repository ); - - if ( filter.include( artifact ) ) - { - artifacts.add( artifact ); - } - else - { - addExcludedPath( path, "Omitted by filter" ); - } - } - catch ( DiscovererException e ) - { - addKickedOutPath( path, e.getMessage() ); - } - } - } - return artifacts; - } - - /** - * Returns an artifact object that is represented by the specified path in a repository - * - * @param path The path that is pointing to an artifact - * @param repository The repository of the artifact - * @return Artifact - * @throws DiscovererException when the specified path does correspond to an artifact - */ - public Artifact buildArtifactFromPath( String path, ArtifactRepository repository ) - throws DiscovererException - { - Artifact artifact = buildArtifact( path ); - - if ( artifact != null ) - { - artifact.setRepository( repository ); - artifact.setFile( new File( repository.getBasedir(), path ) ); - } - - return artifact; - } -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java deleted file mode 100644 index 7e0ee4fea..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/AbstractDiscoverer.java +++ /dev/null @@ -1,158 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import org.apache.maven.artifact.factory.ArtifactFactory; -import org.codehaus.plexus.logging.AbstractLogEnabled; -import org.codehaus.plexus.util.DirectoryScanner; -import org.codehaus.plexus.util.FileUtils; - -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; - -/** - * Base class for the artifact and metadata discoverers. - * - * @author Brett Porter - */ -public abstract class AbstractDiscoverer - extends AbstractLogEnabled - implements Discoverer -{ - private List kickedOutPaths = new ArrayList(); - - /** - * @plexus.requirement - */ - protected ArtifactFactory artifactFactory; - - private static final String[] EMPTY_STRING_ARRAY = new String[0]; - - private List excludedPaths = new ArrayList(); - - /** - * @plexus.configuration default-value="true" - */ - private boolean trackOmittedPaths; - - /** - * Add a path to the list of files that were kicked out due to being invalid. - * - * @param path the path to add - * @param reason the reason why the path is being kicked out - */ - protected void addKickedOutPath( String path, String reason ) - { - if ( trackOmittedPaths ) - { - kickedOutPaths.add( new DiscovererPath( path, reason ) ); - } - } - - /** - * Add a path to the list of files that were excluded. - * - * @param path the path to add - * @param reason the reason why the path is excluded - */ - protected void addExcludedPath( String path, String reason ) - { - excludedPaths.add( new DiscovererPath( path, reason ) ); - } - - /** - * Returns an iterator for the list if DiscovererPaths that were found to not represent a searched object - * - * @return Iterator for the DiscovererPath List - */ - public Iterator getKickedOutPathsIterator() - { - assert trackOmittedPaths; - return kickedOutPaths.iterator(); - } - - protected List scanForArtifactPaths( File repositoryBase, List blacklistedPatterns, String[] includes, - String[] excludes ) - { - List allExcludes = new ArrayList(); - allExcludes.addAll( FileUtils.getDefaultExcludesAsList() ); - if ( excludes != null ) - { - allExcludes.addAll( Arrays.asList( excludes ) ); - } - if ( blacklistedPatterns != null ) - { - allExcludes.addAll( blacklistedPatterns ); - } - - DirectoryScanner scanner = new DirectoryScanner(); - - scanner.setBasedir( repositoryBase ); - - if ( includes != null ) - { - scanner.setIncludes( includes ); - } - scanner.setExcludes( (String[]) allExcludes.toArray( EMPTY_STRING_ARRAY ) ); - - // TODO: Correct for extremely large repositories (artifact counts over 200,000 entries) - scanner.scan(); - - if ( trackOmittedPaths ) - { - for ( Iterator files = Arrays.asList( scanner.getExcludedFiles() ).iterator(); files.hasNext(); ) - { - String path = files.next().toString(); - - excludedPaths.add( new DiscovererPath( path, "Artifact was in the specified list of exclusions" ) ); - } - } - - // TODO: this could be a part of the scanner - List includedPaths = new ArrayList(); - for ( Iterator files = Arrays.asList( scanner.getIncludedFiles() ).iterator(); files.hasNext(); ) - { - String path = files.next().toString(); - - includedPaths.add( path ); - } - - return includedPaths; - } - - /** - * Returns an iterator for the list if DiscovererPaths that were not processed because they are explicitly excluded - * - * @return Iterator for the DiscovererPath List - */ - public Iterator getExcludedPathsIterator() - { - assert trackOmittedPaths; - return excludedPaths.iterator(); - } - - public void setTrackOmittedPaths( boolean trackOmittedPaths ) - { 
- this.trackOmittedPaths = trackOmittedPaths; - } -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/ArtifactDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/ArtifactDiscoverer.java deleted file mode 100644 index 50873b284..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/ArtifactDiscoverer.java +++ /dev/null @@ -1,66 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.resolver.filter.ArtifactFilter; - -import java.util.List; - -/** - * Interface for implementation that can discover artifacts within a repository. - * - * @author John Casey - * @author Brett Porter - * @todo do we want blacklisted patterns in another form? Part of the object construction? - * @todo should includeSnapshots be configuration on the component? If not, should the methods be changed to include alternates for both possibilities (discoverReleaseArtifacts, discoverReleaseAndSnapshotArtifacts)? - * @todo instead of a returned list, should a listener be passed in? - */ -public interface ArtifactDiscoverer - extends Discoverer -{ - String ROLE = ArtifactDiscoverer.class.getName(); - - /** - * Discover artifacts in the repository. Only artifacts added since the last attempt at discovery will be found. - * This process guarantees never to miss an artifact, however it is possible that an artifact will be received twice - * consecutively even if unchanged, so any users of this list must handle such a situation gracefully. 
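For contrast with the consumer-based Discoverer introduced elsewhere in this commit, this is roughly how the interface being deleted here was invoked (compare the executor diff earlier in this commit). The wrapper class and variable names are illustrative only, and the imports target the pre-change org.apache.maven.archiva.discoverer package.

import org.apache.maven.archiva.discoverer.*;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;

import java.util.List;

public class OldDiscovererUsageExample
{
    public List discover( ArtifactDiscoverer discoverer, ArtifactRepository repository,
                          List blacklistedPatterns, ArtifactFilter filter )
        throws DiscovererException
    {
        // Returns only the artifacts accepted by the filter; paths rejected while
        // building artifacts are tracked internally as "kicked out" paths.
        return discoverer.discoverArtifacts( repository, blacklistedPatterns, filter );
    }
}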
- * - * @param repository the location of the repository - * @param blacklistedPatterns pattern that lists any files to prevent from being included when scanning - * @param filter filter for artifacts to include in the discovered list - * @return the list of artifacts discovered - * @throws DiscovererException if there was an unrecoverable problem discovering artifacts or recording progress - */ - List discoverArtifacts( ArtifactRepository repository, List blacklistedPatterns, ArtifactFilter filter ) - throws DiscovererException; - - /** - * Build an artifact from a path in the repository - * - * @param path the path - * @return the artifact - * @throws DiscovererException if the file is not a valid artifact - * @todo this should be in maven-artifact - */ - Artifact buildArtifact( String path ) - throws DiscovererException; -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java deleted file mode 100644 index f085602da..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscoverer.java +++ /dev/null @@ -1,200 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.commons.lang.StringUtils; -import org.apache.maven.artifact.Artifact; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.StringTokenizer; - -/** - * Artifact discoverer for the new repository layout (Maven 2.0+). - * - * @author John Casey - * @author Brett Porter - * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="default" - */ -public class DefaultArtifactDiscoverer - extends AbstractArtifactDiscoverer -{ - /** - * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String) - */ - public Artifact buildArtifact( String path ) - throws DiscovererException - { - List pathParts = new ArrayList(); - StringTokenizer st = new StringTokenizer( path, "/\\" ); - while ( st.hasMoreTokens() ) - { - pathParts.add( st.nextToken() ); - } - - Collections.reverse( pathParts ); - - Artifact artifact; - if ( pathParts.size() >= 4 ) - { - // maven 2.x path - - // the actual artifact filename. - String filename = (String) pathParts.remove( 0 ); - - // the next one is the version. - String version = (String) pathParts.remove( 0 ); - - // the next one is the artifactId. - String artifactId = (String) pathParts.remove( 0 ); - - // the remaining are the groupId. - Collections.reverse( pathParts ); - String groupId = StringUtils.join( pathParts.iterator(), "." 
); - - String remainingFilename = filename; - if ( remainingFilename.startsWith( artifactId + "-" ) ) - { - remainingFilename = remainingFilename.substring( artifactId.length() + 1 ); - - String classifier = null; - - // TODO: use artifact handler, share with legacy discoverer - String type; - if ( remainingFilename.endsWith( ".tar.gz" ) ) - { - type = "distribution-tgz"; - remainingFilename = - remainingFilename.substring( 0, remainingFilename.length() - ".tar.gz".length() ); - } - else if ( remainingFilename.endsWith( ".zip" ) ) - { - type = "distribution-zip"; - remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() ); - } - else if ( remainingFilename.endsWith( "-test-sources.jar" ) ) - { - type = "java-source"; - classifier = "test-sources"; - remainingFilename = - remainingFilename.substring( 0, remainingFilename.length() - "-test-sources.jar".length() ); - } - else if ( remainingFilename.endsWith( "-sources.jar" ) ) - { - type = "java-source"; - classifier = "sources"; - remainingFilename = - remainingFilename.substring( 0, remainingFilename.length() - "-sources.jar".length() ); - } - else - { - int index = remainingFilename.lastIndexOf( "." ); - if ( index >= 0 ) - { - type = remainingFilename.substring( index + 1 ); - remainingFilename = remainingFilename.substring( 0, index ); - } - else - { - throw new DiscovererException( "Path filename does not have an extension" ); - } - } - - Artifact result; - if ( classifier == null ) - { - result = - artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type ); - } - else - { - result = - artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier ); - } - - if ( result.isSnapshot() ) - { - // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b - int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 ); - if ( classifierIndex >= 0 ) - { - classifier = remainingFilename.substring( classifierIndex + 1 ); - remainingFilename = remainingFilename.substring( 0, classifierIndex ); - result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename, - type, classifier ); - } - else - { - result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename, - Artifact.SCOPE_RUNTIME, type ); - } - - // poor encapsulation requires we do this to populate base version - if ( !result.isSnapshot() ) - { - throw new DiscovererException( "Failed to create a snapshot artifact: " + result ); - } - else if ( !result.getBaseVersion().equals( version ) ) - { - throw new DiscovererException( - "Built snapshot artifact base version does not match path version: " + result + - "; should have been version: " + version ); - } - else - { - artifact = result; - } - } - else if ( !remainingFilename.startsWith( version ) ) - { - throw new DiscovererException( "Built artifact version does not match path version" ); - } - else if ( !remainingFilename.equals( version ) ) - { - if ( remainingFilename.charAt( version.length() ) == '-' ) - { - classifier = remainingFilename.substring( version.length() + 1 ); - artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, - classifier ); - } - else - { - throw new DiscovererException( "Path version does not corresspond to an artifact version" ); - } - } - else - { - artifact = result; - } - } - else - { - throw new DiscovererException( "Path filename does not correspond to an artifact" ); - } - } - else - { - throw new 
DiscovererException( "Path is too short to build an artifact from" ); - } - - return artifact; - } -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java new file mode 100644 index 000000000..bd5e4fd44 --- /dev/null +++ b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultDiscoverer.java @@ -0,0 +1,178 @@ +package org.apache.maven.archiva.discoverer; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.common.consumers.Consumer; +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.codehaus.plexus.logging.AbstractLogEnabled; +import org.codehaus.plexus.util.DirectoryWalker; +import org.codehaus.plexus.util.FileUtils; + +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +/** + * Discoverer Implementation. + * + * @author Brett Porter + * @author Joakim Erdfelt + * @plexus.component role="org.apache.maven.archiva.discoverer.Discoverer" + */ +public class DefaultDiscoverer + extends AbstractLogEnabled + implements Discoverer +{ + /** + * Standard patterns to exclude from discovery as they are usually noise. + */ + private static final String[] STANDARD_DISCOVERY_EXCLUDES = { + "bin/**", + "reports/**", + ".index", + ".reports/**", + ".maven/**", + "**/*snapshot-version", + "*/website/**", + "*/licences/**", + "**/.htaccess", + "**/*.html", + "**/*.txt", + "**/README*", + "**/CHANGELOG*", + "**/KEYS*" }; + + public DefaultDiscoverer() + { + } + + public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots ) + throws DiscovererException + { + return walkRepository( repository, consumers, includeSnapshots, 0, null, null ); + } + + public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, + boolean includeSnapshots, long onlyModifiedAfterTimestamp, + List extraFileExclusions, List extraFileInclusions ) + throws DiscovererException + { + // Sanity Check + + if ( repository == null ) + { + throw new IllegalArgumentException( "Unable to operate on a null repository." ); + } + + if ( !"file".equals( repository.getProtocol() ) ) + { + throw new UnsupportedOperationException( "Only filesystem repositories are supported." ); + } + + File repositoryBase = new File( repository.getBasedir() ); + + if ( !repositoryBase.exists() ) + { + throw new UnsupportedOperationException( "Unable to scan a repository, directory " + + repositoryBase.getAbsolutePath() + " does not exist." 
); + } + + if ( !repositoryBase.isDirectory() ) + { + throw new UnsupportedOperationException( "Unable to scan a repository, path " + + repositoryBase.getAbsolutePath() + " is not a directory." ); + } + + // Setup Includes / Excludes. + + List allExcludes = new ArrayList(); + List allIncludes = new ArrayList(); + + // Exclude all of the SCM patterns. + allExcludes.addAll( FileUtils.getDefaultExcludesAsList() ); + + // Exclude all of the archiva noise patterns. + allExcludes.addAll( Arrays.asList( STANDARD_DISCOVERY_EXCLUDES ) ); + + if ( !includeSnapshots ) + { + allExcludes.add( "**/*-SNAPSHOT*" ); + } + + if ( extraFileExclusions != null ) + { + allExcludes.addAll( extraFileExclusions ); + } + + Iterator it = consumers.iterator(); + while ( it.hasNext() ) + { + Consumer consumer = (Consumer) it.next(); + + /* NOTE: Do not insert the consumer exclusion patterns here. + * Exclusion patterns are handled by RepositoryScanner.wantsFile(Consumer, String) + * + * addUniqueElements( consumer.getExcludePatterns(), allExcludes ); + */ + addUniqueElements( consumer.getIncludePatterns(), allIncludes ); + } + + if ( extraFileInclusions != null ) + { + allIncludes.addAll( extraFileInclusions ); + } + + // Setup Directory Walker + + DirectoryWalker dirWalker = new DirectoryWalker(); + + dirWalker.setBaseDir( repositoryBase ); + + dirWalker.setIncludes( allIncludes ); + dirWalker.setExcludes( allExcludes ); + + // Setup the Scan Instance + RepositoryScanner repoScanner = new RepositoryScanner( repository, consumers ); + repoScanner.setOnlyModifiedAfterTimestamp( onlyModifiedAfterTimestamp ); + + repoScanner.setLogger( getLogger() ); + dirWalker.addDirectoryWalkListener( repoScanner ); + + // Execute scan. + dirWalker.scan(); + + return repoScanner.getStatistics(); + } + + private void addUniqueElements( List fromList, List toList ) + { + Iterator itFrom = fromList.iterator(); + while ( itFrom.hasNext() ) + { + Object o = itFrom.next(); + if ( !toList.contains( o ) ) + { + toList.add( o ); + } + } + } +} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java deleted file mode 100644 index f97972381..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscoverer.java +++ /dev/null @@ -1,226 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
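A hypothetical test, not part of this commit, to make the default-layout path parsing above concrete; it targets the pre-change ArtifactDiscoverer API that this commit removes, so it is purely illustrative of the path-to-coordinates mapping.

import org.apache.maven.archiva.discoverer.ArtifactDiscoverer;
import org.apache.maven.artifact.Artifact;
import org.codehaus.plexus.PlexusTestCase;

public class DefaultLayoutPathExample
    extends PlexusTestCase
{
    public void testPathParsing()
        throws Exception
    {
        // Component declared with role-hint "default" on DefaultArtifactDiscoverer.
        ArtifactDiscoverer discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, "default" );

        // commons-lang/commons-lang/2.1/commons-lang-2.1.jar
        //   -> groupId=commons-lang, artifactId=commons-lang, version=2.1, type=jar
        Artifact a = discoverer.buildArtifact( "commons-lang/commons-lang/2.1/commons-lang-2.1.jar" );
        assertEquals( "commons-lang", a.getGroupId() );
        assertEquals( "commons-lang", a.getArtifactId() );
        assertEquals( "2.1", a.getVersion() );
        assertEquals( "jar", a.getType() );

        // org/apache/maven/some-plugin/1.0/some-plugin-1.0-sources.jar
        //   -> type=java-source, classifier=sources
        a = discoverer.buildArtifact( "org/apache/maven/some-plugin/1.0/some-plugin-1.0-sources.jar" );
        assertEquals( "org.apache.maven", a.getGroupId() );
        assertEquals( "java-source", a.getType() );
        assertEquals( "sources", a.getClassifier() );
    }
}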
- */ - -import org.apache.commons.lang.StringUtils; -import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter; -import org.apache.maven.archiva.discoverer.filter.MetadataFilter; -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata; -import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata; -import org.apache.maven.artifact.repository.metadata.Metadata; -import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; -import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata; -import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader; -import org.codehaus.plexus.util.xml.pull.XmlPullParserException; - -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.io.Reader; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.StringTokenizer; - -/** - * This class gets all the paths that contain the metadata files. - * - * @plexus.component role="org.apache.maven.archiva.discoverer.MetadataDiscoverer" role-hint="default" - */ -public class DefaultMetadataDiscoverer - extends AbstractDiscoverer - implements MetadataDiscoverer -{ - /** - * Standard patterns to include in discovery of metadata files. - * - * @todo Note that only the remote format is supported at this time: you cannot search local repository metadata due - * to the way it is later loaded in the searchers. Review code using pathOfRemoteMetadata. IS there any value in - * searching the local metadata in the first place though? - */ - private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"}; - - public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter filter ) - throws DiscovererException - { - if ( !"file".equals( repository.getProtocol() ) ) - { - throw new UnsupportedOperationException( "Only filesystem repositories are supported" ); - } - - List metadataFiles = new ArrayList(); - - File repositoryBase = new File( repository.getBasedir() ); - if ( repositoryBase.exists() ) - { - List metadataPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns, - STANDARD_DISCOVERY_INCLUDES, null ); - - for ( Iterator i = metadataPaths.iterator(); i.hasNext(); ) - { - String metadataPath = (String) i.next(); - try - { - RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), metadataPath ); - File f = new File( repository.getBasedir(), metadataPath ); - if ( filter.include( metadata, f.lastModified() ) ) - { - metadataFiles.add( metadata ); - } - else - { - addExcludedPath( metadataPath, "Metadata excluded by filter" ); - } - } - catch ( DiscovererException e ) - { - addKickedOutPath( metadataPath, e.getMessage() ); - } - } - } - return metadataFiles; - } - - public List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns ) - throws DiscovererException - { - return discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() ); - } - - private RepositoryMetadata buildMetadata( String repo, String metadataPath ) - throws DiscovererException - { - Metadata m; - File f = new File( repo, metadataPath ); - try - { - Reader reader = new FileReader( f ); - MetadataXpp3Reader metadataReader = new MetadataXpp3Reader(); - - m = metadataReader.read( reader ); - } - catch ( 
XmlPullParserException e ) - { - throw new DiscovererException( "Error parsing metadata file '" + f + "': " + e.getMessage(), e ); - } - catch ( IOException e ) - { - throw new DiscovererException( "Error reading metadata file '" + f + "': " + e.getMessage(), e ); - } - - RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath ); - - if ( repositoryMetadata == null ) - { - throw new DiscovererException( "Unable to build a repository metadata from path" ); - } - - return repositoryMetadata; - } - - /** - * Builds a RepositoryMetadata object from a Metadata object and its path. - * - * @param m Metadata - * @param metadataPath path - * @return RepositoryMetadata if the parameters represent one; null if not - * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc) - */ - private RepositoryMetadata buildMetadata( Metadata m, String metadataPath ) - { - String metaGroupId = m.getGroupId(); - String metaArtifactId = m.getArtifactId(); - String metaVersion = m.getVersion(); - - // check if the groupId, artifactId and version is in the - // metadataPath - // parse the path, in reverse order - List pathParts = new ArrayList(); - StringTokenizer st = new StringTokenizer( metadataPath, "/\\" ); - while ( st.hasMoreTokens() ) - { - pathParts.add( st.nextToken() ); - } - - Collections.reverse( pathParts ); - // remove the metadata file - pathParts.remove( 0 ); - Iterator it = pathParts.iterator(); - String tmpDir = (String) it.next(); - - Artifact artifact = null; - if ( StringUtils.isNotEmpty( metaVersion ) ) - { - artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion ); - } - - // snapshotMetadata - RepositoryMetadata metadata = null; - if ( tmpDir != null && tmpDir.equals( metaVersion ) ) - { - if ( artifact != null ) - { - metadata = new SnapshotArtifactRepositoryMetadata( artifact ); - } - } - else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) ) - { - // artifactMetadata - if ( artifact != null ) - { - metadata = new ArtifactRepositoryMetadata( artifact ); - } - else - { - artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" ); - metadata = new ArtifactRepositoryMetadata( artifact ); - } - } - else - { - String groupDir = ""; - int ctr = 0; - for ( it = pathParts.iterator(); it.hasNext(); ) - { - String path = (String) it.next(); - if ( ctr == 0 ) - { - groupDir = path; - } - else - { - groupDir = path + "." + groupDir; - } - ctr++; - } - - // groupMetadata - if ( metaGroupId != null && metaGroupId.equals( groupDir ) ) - { - metadata = new GroupRepositoryMetadata( metaGroupId ); - } - } - - return metadata; - } -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/Discoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/Discoverer.java index 157557bf2..4f670c4e8 100644 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/Discoverer.java +++ b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/Discoverer.java @@ -19,26 +19,55 @@ package org.apache.maven.archiva.discoverer; * under the License. */ -import java.util.Iterator; +import org.apache.maven.artifact.repository.ArtifactRepository; + +import java.io.File; +import java.util.List; /** - * @author Edwin Punzalan + * Discoverer - generic discoverer of content in an ArtifactRepository. 
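To make the classification in buildMetadata above concrete (the paths are illustrative): a maven-metadata.xml whose parent directory matches the version declared in the file (for example org/foo/bar/1.0-SNAPSHOT/maven-metadata.xml) is wrapped as SnapshotArtifactRepositoryMetadata; one whose parent directory matches the artifactId becomes ArtifactRepositoryMetadata; and one whose parent directories spell out the groupId becomes GroupRepositoryMetadata. Any other path leaves the result null, so the caller throws a DiscovererException and the file is recorded as a kicked-out path.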
+ * + * @author Joakim Erdfelt + * @version $Id$ */ public interface Discoverer { + public static final String ROLE = Discoverer.class.getName(); + /** - * Get the list of paths kicked out during the discovery process. - * - * @return the paths as Strings. + * Walk the repository, and report to the consumers the files found. + * + * Report changes to the appropriate Consumer. + * + * This is just a convenience method to {@link #walkRepository(ArtifactRepository, List, boolean, long, List, List)} + * equivalent to calling walkRepository( repository, consumers, includeSnapshots, 0, null, null ); + * + * @param repository the repository to change. + * @param consumers use the provided list of consumers. + * @param includeSnapshots true to include snapshots in the walking of this repository. + * @return the statistics for this scan. + * @throws DiscovererException if there was a fundamental problem with getting the discoverer started. */ - Iterator getKickedOutPathsIterator(); + public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, boolean includeSnapshots ) + throws DiscovererException; /** - * Get the list of paths excluded during the discovery process. - * - * @return the paths as Strings. + * Walk the repository, and report to the consumers the files found. + * + * Report changes to the appropriate Consumer. + * + * @param repository the repository to change. + * @param consumers use the provided list of consumers. + * @param includeSnapshots true to include snapshots in the scanning of this repository. + * @param onlyModifiedAfterTimestamp Only report to the consumers, files that have a {@link File#lastModified()}) + * after the provided timestamp. + * @param extraFileExclusions an optional list of file exclusions on the walk. + * @param extraFileInclusions an optional list of file inclusions on the walk. + * @return the statistics for this scan. + * @throws DiscovererException if there was a fundamental problem with getting the discoverer started. */ - Iterator getExcludedPathsIterator(); - - void setTrackOmittedPaths( boolean trackOmittedPaths ); + public DiscovererStatistics walkRepository( ArtifactRepository repository, List consumers, + boolean includeSnapshots, long onlyModifiedAfterTimestamp, + List extraFileExclusions, List extraFileInclusions ) + throws DiscovererException; } diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java deleted file mode 100644 index b8930780b..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererPath.java +++ /dev/null @@ -1,52 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * @author Edwin Punzalan - */ -public class DiscovererPath -{ - /** - * The path discovered. - */ - private final String path; - - /** - * A comment about why the path is being processed. - */ - private final String comment; - - public DiscovererPath( String path, String comment ) - { - this.path = path; - this.comment = comment; - } - - public String getPath() - { - return path; - } - - public String getComment() - { - return comment; - } -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java new file mode 100644 index 000000000..28c218658 --- /dev/null +++ b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/DiscovererStatistics.java @@ -0,0 +1,198 @@ +package org.apache.maven.archiva.discoverer; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
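The interface above only states the contract; as a hedged usage sketch (not part of this commit), a caller with a Discoverer and some consumers in hand would drive a scan roughly as follows. How those instances are obtained (Plexus lookup versus @plexus.requirement injection) is an assumption.

import org.apache.maven.archiva.common.consumers.Consumer;
import org.apache.maven.archiva.discoverer.*;
import org.apache.maven.artifact.repository.ArtifactRepository;

import java.util.ArrayList;
import java.util.List;

public class RepositoryWalkExample
{
    public DiscovererStatistics scan( Discoverer discoverer, ArtifactRepository repository,
                                      Consumer indexConsumer, Consumer healthConsumer )
        throws DiscovererException
    {
        List consumers = new ArrayList();
        consumers.add( indexConsumer );
        consumers.add( healthConsumer );

        // false = skip snapshots; equivalent to walkRepository( repository, consumers, false, 0, null, null )
        DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, false );

        // stats.dump( logger ) would log the scan summary; here the statistics are just returned.
        return stats;
    }
}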
+ */ + +import org.apache.commons.lang.math.NumberUtils; +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.codehaus.plexus.logging.Logger; +import org.codehaus.plexus.util.IOUtil; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Properties; + +/** + * DiscovererStatistics + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class DiscovererStatistics +{ + private static final String PROP_FILES_CONSUMED = "scan.consumed.files"; + + private static final String PROP_FILES_INCLUDED = "scan.included.files"; + + private static final String PROP_FILES_SKIPPED = "scan.skipped.files"; + + private static final String PROP_TIMESTAMP_STARTED = "scan.started.timestamp"; + + private static final String PROP_TIMESTAMP_FINISHED = "scan.finished.timestamp"; + + protected long timestampStarted = 0; + + protected long timestampFinished = 0; + + protected long filesIncluded = 0; + + protected long filesConsumed = 0; + + protected long filesSkipped = 0; + + private ArtifactRepository repository; + + public DiscovererStatistics( ArtifactRepository repository ) + { + this.repository = repository; + } + + public void load( String filename ) + throws IOException + { + File repositoryBase = new File( this.repository.getBasedir() ); + + File scanProperties = new File( repositoryBase, filename ); + FileInputStream fis = null; + try + { + Properties props = new Properties(); + fis = new FileInputStream( scanProperties ); + props.load( fis ); + + timestampFinished = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_FINISHED ), 0 ); + timestampStarted = NumberUtils.toLong( props.getProperty( PROP_TIMESTAMP_STARTED ), 0 ); + filesIncluded = NumberUtils.toLong( props.getProperty( PROP_FILES_INCLUDED ), 0 ); + filesConsumed = NumberUtils.toLong( props.getProperty( PROP_FILES_CONSUMED ), 0 ); + filesSkipped = NumberUtils.toLong( props.getProperty( PROP_FILES_SKIPPED ), 0 ); + } + catch ( IOException e ) + { + reset(); + throw e; + } + finally + { + IOUtil.close( fis ); + } + } + + public void save( String filename ) + throws IOException + { + Properties props = new Properties(); + props.setProperty( PROP_TIMESTAMP_FINISHED, String.valueOf( timestampFinished ) ); + props.setProperty( PROP_TIMESTAMP_STARTED, String.valueOf( timestampStarted ) ); + props.setProperty( PROP_FILES_INCLUDED, String.valueOf( filesIncluded ) ); + props.setProperty( PROP_FILES_CONSUMED, String.valueOf( filesConsumed ) ); + props.setProperty( PROP_FILES_SKIPPED, String.valueOf( filesSkipped ) ); + + File repositoryBase = new File( this.repository.getBasedir() ); + File statsFile = new File( repositoryBase, filename ); + + FileOutputStream fos = null; + try + { + fos = new FileOutputStream( statsFile ); + props.store( fos, "Last Scan Information, managed by Archiva. 
DO NOT EDIT" );
+            fos.flush();
+        }
+        finally
+        {
+            IOUtil.close( fos );
+        }
+    }
+
+    public void reset()
+    {
+        timestampStarted = 0;
+        timestampFinished = 0;
+        filesIncluded = 0;
+        filesConsumed = 0;
+        filesSkipped = 0;
+    }
+
+    public long getElapsedMilliseconds()
+    {
+        return timestampFinished - timestampStarted;
+    }
+
+    public long getFilesConsumed()
+    {
+        return filesConsumed;
+    }
+
+    public long getFilesIncluded()
+    {
+        return filesIncluded;
+    }
+
+    public ArtifactRepository getRepository()
+    {
+        return repository;
+    }
+
+    public long getTimestampFinished()
+    {
+        return timestampFinished;
+    }
+
+    public long getTimestampStarted()
+    {
+        return timestampStarted;
+    }
+
+    public long getFilesSkipped()
+    {
+        return filesSkipped;
+    }
+
+    public void setTimestampFinished( long timestampFinished )
+    {
+        this.timestampFinished = timestampFinished;
+    }
+
+    public void setTimestampStarted( long timestampStarted )
+    {
+        this.timestampStarted = timestampStarted;
+    }
+
+    public void dump( Logger logger )
+    {
+        logger.info( "----------------------------------------------------" );
+        logger.info( "Scan of Repository: " + repository.getId() );
+        logger.info( " Started : " + toHumanTimestamp( this.getTimestampStarted() ) );
+        logger.info( " Finished: " + toHumanTimestamp( this.getTimestampFinished() ) );
+        // TODO: pretty print elapsed time.
+        logger.info( " Duration: " + this.getElapsedMilliseconds() + "ms" );
+        logger.info( " Files : " + this.getFilesIncluded() );
+        logger.info( " Consumed: " + this.getFilesConsumed() );
+        logger.info( " Skipped : " + this.getFilesSkipped() );
+    }
+
+    private String toHumanTimestamp( long timestamp )
+    {
+        SimpleDateFormat dateFormat = new SimpleDateFormat();
+        return dateFormat.format( new Date( timestamp ) );
+    }
+}
diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java
deleted file mode 100644
index 13b8080a4..000000000
--- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscoverer.java
+++ /dev/null
@@ -1,291 +0,0 @@
-package org.apache.maven.archiva.discoverer;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the legacy repository layout (Maven 1.x).
- * Method used to build an artifact object using a relative path from a repository base directory.
An artifactId - * having the words "DEV", "PRE", "RC", "ALPHA", "BETA", "DEBUG", "UNOFFICIAL", "CURRENT", "LATEST", "FCS", - * "RELEASE", "NIGHTLY", "SNAPSHOT" and "TEST" (not case-sensitive) will most likely make this method fail as - * they are reserved for version usage. - * - * @author John Casey - * @author Brett Porter - * @plexus.component role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" role-hint="legacy" - */ -public class LegacyArtifactDiscoverer - extends AbstractArtifactDiscoverer -{ - /** - * @see org.apache.maven.archiva.discoverer.ArtifactDiscoverer#buildArtifact(String) - */ - public Artifact buildArtifact( String path ) - throws DiscovererException - { - StringTokenizer tokens = new StringTokenizer( path, "/\\" ); - - Artifact result; - - int numberOfTokens = tokens.countTokens(); - - if ( numberOfTokens == 3 ) - { - String groupId = tokens.nextToken(); - - String type = tokens.nextToken(); - - if ( type.endsWith( "s" ) ) - { - type = type.substring( 0, type.length() - 1 ); - - // contains artifactId, version, classifier, and extension. - String avceGlob = tokens.nextToken(); - - //noinspection CollectionDeclaredAsConcreteClass - LinkedList avceTokenList = new LinkedList(); - - StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" ); - while ( avceTokenizer.hasMoreTokens() ) - { - avceTokenList.addLast( avceTokenizer.nextToken() ); - } - - String lastAvceToken = (String) avceTokenList.removeLast(); - - // TODO: share with other discoverer, use artifact handlers instead - if ( lastAvceToken.endsWith( ".tar.gz" ) ) - { - type = "distribution-tgz"; - - lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() ); - - avceTokenList.addLast( lastAvceToken ); - } - else if ( lastAvceToken.endsWith( "sources.jar" ) ) - { - type = "java-source"; - - lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() ); - - avceTokenList.addLast( lastAvceToken ); - } - else if ( lastAvceToken.endsWith( "javadoc.jar" ) ) - { - type = "javadoc.jar"; - - lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() ); - - avceTokenList.addLast( lastAvceToken ); - } - else if ( lastAvceToken.endsWith( ".zip" ) ) - { - type = "distribution-zip"; - - lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() ); - - avceTokenList.addLast( lastAvceToken ); - } - else - { - int extPos = lastAvceToken.lastIndexOf( '.' ); - - if ( extPos > 0 ) - { - String ext = lastAvceToken.substring( extPos + 1 ); - if ( type.equals( ext ) || "plugin".equals( type ) ) - { - lastAvceToken = lastAvceToken.substring( 0, extPos ); - - avceTokenList.addLast( lastAvceToken ); - } - else - { - throw new DiscovererException( "Path type does not match the extension" ); - } - } - else - { - throw new DiscovererException( "Path filename does not have an extension" ); - } - } - - // let's discover the version, and whatever's leftover will be either - // a classifier, or part of the artifactId, depending on position. - // Since version is at the end, we have to move in from the back. - Collections.reverse( avceTokenList ); - - // TODO: this is obscene - surely a better way? 
- String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" + - "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" + - "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" + - "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" + - "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" + - "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" + - "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)"; - - StringBuffer classifierBuffer = new StringBuffer(); - StringBuffer versionBuffer = new StringBuffer(); - - boolean firstVersionTokenEncountered = false; - boolean firstToken = true; - - int tokensIterated = 0; - for ( Iterator it = avceTokenList.iterator(); it.hasNext(); ) - { - String token = (String) it.next(); - - boolean tokenIsVersionPart = token.matches( validVersionParts ); - - StringBuffer bufferToUpdate; - - // NOTE: logic in code is reversed, since we're peeling off the back - // Any token after the last versionPart will be in the classifier. - // Any token UP TO first non-versionPart is part of the version. - if ( !tokenIsVersionPart ) - { - if ( firstVersionTokenEncountered ) - { - //noinspection BreakStatement - break; - } - else - { - bufferToUpdate = classifierBuffer; - } - } - else - { - firstVersionTokenEncountered = true; - - bufferToUpdate = versionBuffer; - } - - if ( firstToken ) - { - firstToken = false; - } - else - { - bufferToUpdate.insert( 0, '-' ); - } - - bufferToUpdate.insert( 0, token ); - - tokensIterated++; - } - - // Now, restore the proper ordering so we can build the artifactId. - Collections.reverse( avceTokenList ); - - // if we didn't find a version, then punt. Use the last token - // as the version, and set the classifier empty. - if ( versionBuffer.length() < 1 ) - { - if ( avceTokenList.size() > 1 ) - { - int lastIdx = avceTokenList.size() - 1; - - versionBuffer.append( avceTokenList.get( lastIdx ) ); - avceTokenList.remove( lastIdx ); - } - - classifierBuffer.setLength( 0 ); - } - else - { - // if everything is kosher, then pop off all the classifier and - // version tokens, leaving the naked artifact id in the list. 
- avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) ); - } - - StringBuffer artifactIdBuffer = new StringBuffer(); - - firstToken = true; - for ( Iterator it = avceTokenList.iterator(); it.hasNext(); ) - { - String token = (String) it.next(); - - if ( firstToken ) - { - firstToken = false; - } - else - { - artifactIdBuffer.append( '-' ); - } - - artifactIdBuffer.append( token ); - } - - String artifactId = artifactIdBuffer.toString(); - - if ( artifactId.length() > 0 ) - { - int lastVersionCharIdx = versionBuffer.length() - 1; - if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' ) - { - versionBuffer.setLength( lastVersionCharIdx ); - } - - String version = versionBuffer.toString(); - - if ( version.length() > 0 ) - { - if ( classifierBuffer.length() > 0 ) - { - result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, - classifierBuffer.toString() ); - } - else - { - result = artifactFactory.createArtifact( groupId, artifactId, version, - Artifact.SCOPE_RUNTIME, type ); - } - } - else - { - throw new DiscovererException( "Path filename version is empty" ); - } - } - else - { - throw new DiscovererException( "Path filename artifactId is empty" ); - } - } - else - { - throw new DiscovererException( "Path artifact type does not corresspond to an artifact type" ); - } - } - else - { - throw new DiscovererException( "Path does not match a legacy repository path for an artifact" ); - } - - return result; - } -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java deleted file mode 100644 index ff74a9215..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/MetadataDiscoverer.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.archiva.discoverer.filter.MetadataFilter; -import org.apache.maven.artifact.repository.ArtifactRepository; - -import java.util.List; - -/** - * Interface for discovering metadata files. - */ -public interface MetadataDiscoverer - extends Discoverer -{ - String ROLE = MetadataDiscoverer.class.getName(); - - /** - * Search for metadata files in the repository. - * - * @param repository The repository. - * @param blacklistedPatterns Patterns that are to be excluded from the discovery process. 
- * @param metadataFilter filter to use on the discovered metadata before returning - * @return the list of artifacts found - * @throws DiscovererException if there is a problem during the discovery process - */ - List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns, MetadataFilter metadataFilter ) - throws DiscovererException; - - /** - * Search for metadata files in the repository. - * - * @param repository The repository. - * @param blacklistedPatterns Patterns that are to be excluded from the discovery process. - * @return the list of artifacts found - * @throws DiscovererException if there is a problem during the discovery process - */ - List discoverMetadata( ArtifactRepository repository, List blacklistedPatterns ) - throws DiscovererException; -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java new file mode 100644 index 000000000..9aacbc392 --- /dev/null +++ b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/RepositoryScanner.java @@ -0,0 +1,210 @@ +package org.apache.maven.archiva.discoverer; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.commons.lang.SystemUtils; +import org.apache.maven.archiva.common.consumers.Consumer; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.codehaus.plexus.logging.Logger; +import org.codehaus.plexus.util.DirectoryWalkListener; +import org.codehaus.plexus.util.SelectorUtils; + +import java.io.File; +import java.util.Iterator; +import java.util.List; + +/** + * RepositoryScanner - this is an instance of a scan against a repository. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class RepositoryScanner + implements DirectoryWalkListener +{ + public static final String ROLE = RepositoryScanner.class.getName(); + + private List consumers; + + private ArtifactRepository repository; + + private Logger logger; + + private boolean isCaseSensitive = true; + + private DiscovererStatistics stats; + + private long onlyModifiedAfterTimestamp = 0; + + public RepositoryScanner( ArtifactRepository repository, List consumerList ) + { + this.repository = repository; + this.consumers = consumerList; + stats = new DiscovererStatistics( repository ); + + Iterator it = this.consumers.iterator(); + while ( it.hasNext() ) + { + Consumer consumer = (Consumer) it.next(); + + if ( !consumer.init( this.repository ) ) + { + throw new IllegalStateException( "Consumer [" + consumer.getName() + + "] is reporting that it is incompatible with the [" + repository.getId() + "] repository." 
);
+            }
+        }
+
+        if ( SystemUtils.IS_OS_WINDOWS )
+        {
+            isCaseSensitive = false;
+        }
+    }
+
+    public DiscovererStatistics getStatistics()
+    {
+        return stats;
+    }
+
+    public void directoryWalkStarting( File basedir )
+    {
+        getLogger().info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getBasedir() );
+        stats.reset();
+        stats.timestampStarted = System.currentTimeMillis();
+    }
+
+    public void directoryWalkStep( int percentage, File file )
+    {
+        getLogger().debug( "Walk Step: " + percentage + ", " + file );
+
+        // Timestamp finished points to the last successful scan, not this current one.
+        if ( file.lastModified() < onlyModifiedAfterTimestamp )
+        {
+            // Skip file as no change has occurred.
+            getLogger().debug( "Skipping, No Change: " + file.getAbsolutePath() );
+            stats.filesSkipped++;
+            return;
+        }
+
+        synchronized ( consumers )
+        {
+            stats.filesIncluded++;
+
+            BaseFile basefile = new BaseFile( repository.getBasedir(), file );
+
+            Iterator itConsumers = this.consumers.iterator();
+            while ( itConsumers.hasNext() )
+            {
+                Consumer consumer = (Consumer) itConsumers.next();
+
+                if ( wantsFile( consumer, basefile.getRelativePath() ) )
+                {
+                    try
+                    {
+                        getLogger().debug( "Sending to consumer: " + consumer.getName() );
+                        stats.filesConsumed++;
+                        consumer.processFile( basefile );
+                    }
+                    catch ( Exception e )
+                    {
+                        /* Intentionally Catch all exceptions.
+                         * So that the discoverer processing can continue.
+                         */
+                        getLogger().error(
+                            "Consumer [" + consumer.getName() + "] had an error when processing file ["
+                                + basefile.getAbsolutePath() + "]: " + e.getMessage(), e );
+                    }
+                }
+                else
+                {
+                    getLogger().debug(
+                        "Skipping consumer " + consumer.getName() + " for file "
+                            + basefile.getRelativePath() );
+                }
+            }
+        }
+    }
+
+    public void directoryWalkFinished()
+    {
+        getLogger().info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getBasedir() );
+        stats.timestampFinished = System.currentTimeMillis();
+    }
+
+    private boolean wantsFile( Consumer consumer, String relativePath )
+    {
+        Iterator it;
+
+        // Test excludes first.
+        it = consumer.getExcludePatterns().iterator();
+        while ( it.hasNext() )
+        {
+            String pattern = (String) it.next();
+            if ( SelectorUtils.matchPath( pattern, relativePath, isCaseSensitive ) )
+            {
+                // Definitely does NOT WANT FILE.
+                return false;
+            }
+        }
+
+        // Now test includes.
+        it = consumer.getIncludePatterns().iterator();
+        while ( it.hasNext() )
+        {
+            String pattern = (String) it.next();
+            if ( SelectorUtils.matchPath( pattern, relativePath, isCaseSensitive ) )
+            {
+                // Specifically WANTS FILE.
+                return true;
+            }
+        }
+
+        // Not included, and Not excluded? Default to EXCLUDE.
+        return false;
+    }
+
+    public long getOnlyModifiedAfterTimestamp()
+    {
+        return onlyModifiedAfterTimestamp;
+    }
+
+    public void setOnlyModifiedAfterTimestamp( long onlyModifiedAfterTimestamp )
+    {
+        this.onlyModifiedAfterTimestamp = onlyModifiedAfterTimestamp;
+    }
+
+    /**
+     * Debug method from DirectoryWalker.
+ */ + public void debug( String message ) + { + getLogger().debug( "Repository Scanner: " + message ); + } + + public Logger getLogger() + { + return logger; + } + + public void setLogger( Logger logger ) + { + this.logger = logger; + } +} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java deleted file mode 100644 index 0a85ba1cf..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllArtifactFilter.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.apache.maven.archiva.discoverer.filter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.resolver.filter.ArtifactFilter; - -/** - * Filter that accepts all. - */ -public class AcceptAllArtifactFilter - implements ArtifactFilter -{ - public boolean include( Artifact artifact ) - { - return true; - } -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java deleted file mode 100644 index 80b49a8a9..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/AcceptAllMetadataFilter.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.apache.maven.archiva.discoverer.filter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; - -/** - * Filter that accepts all. 
- */ -public class AcceptAllMetadataFilter - implements MetadataFilter -{ - public boolean include( RepositoryMetadata metadata, long timestamp ) - { - return true; - } -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java deleted file mode 100644 index fff5fb5ec..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/MetadataFilter.java +++ /dev/null @@ -1,39 +0,0 @@ -package org.apache.maven.archiva.discoverer.filter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; - -/** - * Ability to filter repository metadata lists. - * - * @todo should be in maven-artifact - */ -public interface MetadataFilter -{ - /** - * Whether to include this metadata in the filtered list. - * - * @param metadata the metadata - * @param timestamp the time to compare against - it will be included if it doesn't exist or is outdated - * @return whether to include it - */ - boolean include( RepositoryMetadata metadata, long timestamp ); -} diff --git a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java b/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java deleted file mode 100644 index a3c457777..000000000 --- a/archiva-discoverer/src/main/java/org/apache/maven/archiva/discoverer/filter/SnapshotArtifactFilter.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.apache.maven.archiva.discoverer.filter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.resolver.filter.ArtifactFilter; - -/** - * A filter to remove snapshot artifacts during discovery. 
- */ -public class SnapshotArtifactFilter - implements ArtifactFilter -{ - public boolean include( Artifact artifact ) - { - return !artifact.isSnapshot(); - } -} diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java deleted file mode 100644 index f1609e3e2..000000000 --- a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractArtifactDiscovererTest.java +++ /dev/null @@ -1,90 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.factory.ArtifactFactory; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.repository.ArtifactRepositoryFactory; -import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; -import org.codehaus.plexus.PlexusTestCase; - -import java.io.File; - -/** - * @author Edwin Punzalan - */ -public abstract class AbstractArtifactDiscovererTest - extends PlexusTestCase -{ - protected ArtifactDiscoverer discoverer; - - private ArtifactFactory factory; - - protected ArtifactRepository repository; - - protected abstract String getLayout(); - - protected abstract File getRepositoryFile(); - - protected void setUp() - throws Exception - { - super.setUp(); - - discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, getLayout() ); - - factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE ); - - repository = getRepository(); - } - - protected ArtifactRepository getRepository() - throws Exception - { - File basedir = getRepositoryFile(); - - ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE ); - - ArtifactRepositoryLayout layout = - (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, getLayout() ); - - return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null ); - } - - protected Artifact createArtifact( String groupId, String artifactId, String version ) - { - Artifact artifact = factory.createArtifact( groupId, artifactId, version, null, "jar" ); - artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) ); - artifact.setRepository( repository ); - return artifact; - } - - protected Artifact createArtifact( String groupId, String artifactId, String version, String type ) - { - return factory.createArtifact( groupId, artifactId, version, null, type ); - } - - protected Artifact createArtifact( String groupId, String artifactId, String version, String type, - String classifier ) - { - return 
factory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier ); - } -} diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java new file mode 100644 index 000000000..e7185610f --- /dev/null +++ b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AbstractDiscovererTestCase.java @@ -0,0 +1,86 @@ +package org.apache.maven.archiva.discoverer; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.apache.maven.artifact.repository.ArtifactRepositoryFactory; +import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; +import org.codehaus.plexus.PlexusTestCase; + +import java.io.File; + +/** + * @author Edwin Punzalan + * @author Joakim Erdfelt + */ +public abstract class AbstractDiscovererTestCase + extends PlexusTestCase +{ + protected Discoverer discoverer; + + protected void setUp() + throws Exception + { + super.setUp(); + + discoverer = (Discoverer) lookup( Discoverer.ROLE ); + } + + protected void tearDown() + throws Exception + { + release( discoverer ); + super.tearDown(); + } + + protected ArtifactRepository getLegacyRepository() + throws Exception + { + File repoBaseDir = new File( getBasedir(), "src/test/legacy-repository" ); + ArtifactRepository repository = createRepository( repoBaseDir, "legacy" ); + resetRepositoryState( repository ); + return repository; + } + + protected ArtifactRepository getDefaultRepository() + throws Exception + { + File repoBaseDir = new File( getBasedir(), "src/test/repository" ); + ArtifactRepository repository = createRepository( repoBaseDir, "default" ); + resetRepositoryState( repository ); + return repository; + } + + protected void resetRepositoryState( ArtifactRepository repository ) + { + // Implement any kind of repository cleanup. 
+ } + + protected ArtifactRepository createRepository( File basedir, String layout ) + throws Exception + { + ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE ); + + ArtifactRepositoryLayout repoLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, layout ); + + return factory.createArtifactRepository( "discoveryRepo-" + getName(), "file://" + basedir, repoLayout, null, + null ); + } +} diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AllTests.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AllTests.java new file mode 100644 index 000000000..a32460b5a --- /dev/null +++ b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/AllTests.java @@ -0,0 +1,43 @@ +package org.apache.maven.archiva.discoverer; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import junit.framework.Test; +import junit.framework.TestSuite; + +/** + * AllTests - added to allow IDE users to pull all tests into their tool. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class AllTests +{ + + public static Test suite() + { + TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.discoverer" ); + //$JUnit-BEGIN$ + suite.addTestSuite( DefaultDiscovererTest.class ); + //$JUnit-END$ + return suite; + } + +} diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java deleted file mode 100644 index cd20645c0..000000000 --- a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultArtifactDiscovererTest.java +++ /dev/null @@ -1,702 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter; -import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter; -import org.apache.maven.artifact.Artifact; -import org.codehaus.plexus.component.repository.exception.ComponentLookupException; - -import java.io.File; -import java.net.MalformedURLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -/** - * Test the default artifact discoverer. - * - * @author Brett Porter - * @version $Id:DefaultArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $ - */ -public class DefaultArtifactDiscovererTest - extends AbstractArtifactDiscovererTest -{ - private static final List JAVAX_BLACKLIST = Collections.singletonList( "javax/**" ); - - protected String getLayout() - { - return "default"; - } - - protected File getRepositoryFile() - { - return getTestFile( "src/test/repository" ); - } - - public void testDefaultExcludes() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - boolean b = path.indexOf( "CVS" ) >= 0; - if ( b ) - { - found = true; - assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() ); - } - } - assertTrue( "Check exclusion was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 ); - assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 ); - } - } - - public void testStandardExcludes() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "KEYS".equals( path ) ) - { - found = true; - assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() ); - } - } - assertTrue( "Check exclusion was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) ); - } - } - - public void testBlacklistedExclude() - throws DiscovererException - { - List artifacts = discoverArtifactsWithBlacklist( JAVAX_BLACKLIST ); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "javax/sql/jdbc/2.0/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions", - dPath.getComment() ); - } - } - assertTrue( "Check exclusion was found", found ); - - assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) ); - } - - public void testKickoutWithShortPath() - 
throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Path is too short to build an artifact from", - dPath.getComment() ); - - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithWrongArtifactId() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar".equals( - path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Path filename does not correspond to an artifact", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not wrong jar", - "wrong-artifactId-1.0-20050611.112233-1.jar".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithNoType() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/invalid/1/invalid-1".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Path filename does not have an extension", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 'invalid-1'", "invalid-1".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithWrongVersion() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/invalid/1.0/invalid-2.0.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Built artifact version does not match path version", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 'invalid-2.0.jar'", "invalid-2.0.jar".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithLongerVersion() - throws DiscovererException - { - List artifacts = 
discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/invalid/1.0/invalid-1.0b.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Path version does not corresspond to an artifact version", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 'invalid-1.0b.jar'", "invalid-1.0b.jar".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithWrongSnapshotVersion() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", - "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 'invalid-1.0.jar'", "invalid-1.0.jar".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithSnapshotBaseVersion() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar".equals( - path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", - "Built snapshot artifact base version does not match path version: invalid:invalid:jar:1.0-SNAPSHOT:runtime; should have been version: 1.0-20050611.123456-1", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 'invalid-1.0-20050611-123456-1.jar'", - "invalid-1.0-20050611.123456-1.jar".equals( a.getFile().getName() ) ); - } - } - - public void testInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) ); - } - - public void testArtifactWithClassifier() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", - artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) ); - } - - public void testJavaSourcesInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts 
not null", artifacts ); - - assertTrue( "Check normal included", artifacts.contains( - createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) ); - } - - public void testTestSourcesInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", artifacts.contains( - createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "test-sources" ) ) ); - } - - public void testDistributionInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check zip included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) ); - - assertTrue( "Check tar.gz included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) ); - } - - public void testSnapshotInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) ); - assertTrue( "Check snapshot included", - artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ) ) ); - } - - public void testSnapshotInclusionWithClassifier() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check snapshot included", artifacts.contains( - createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ) ) ); - } - - public void testSnapshotExclusion() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) ); - assertFalse( "Check snapshot included", - artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) ); - } - - public void testFileSet() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact artifact = (Artifact) i.next(); - assertNotNull( "Check file is set", artifact.getFile() ); - } - } - - public void testRepositorySet() - throws MalformedURLException, DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - String url = repository.getUrl(); - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact artifact = (Artifact) i.next(); - assertNotNull( "Check repository set", artifact.getRepository() ); - assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() ); - } - } - - public void testStandalonePoms() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - - // cull down to actual artifacts (only standalone poms will have type = pom) - Map keyedArtifacts = new HashMap(); - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - String key = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion(); - if ( !"pom".equals( a.getType() ) || !keyedArtifacts.containsKey( key 
) ) - { - keyedArtifacts.put( key, a ); - } - } - - List models = new ArrayList(); - - for ( Iterator i = keyedArtifacts.values().iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - - if ( "pom".equals( a.getType() ) ) - { - models.add( a ); - } - } - - assertEquals( 4, models.size() ); - - // Define order we expect - Collections.sort( models ); - - Iterator itr = models.iterator(); - Artifact model = (Artifact) itr.next(); - assertEquals( "org.apache.maven", model.getGroupId() ); - assertEquals( "B", model.getArtifactId() ); - assertEquals( "1.0", model.getVersion() ); - model = (Artifact) itr.next(); - assertEquals( "org.apache.maven", model.getGroupId() ); - assertEquals( "B", model.getArtifactId() ); - assertEquals( "2.0", model.getVersion() ); - model = (Artifact) itr.next(); - assertEquals( "org.apache.maven", model.getGroupId() ); - assertEquals( "discovery", model.getArtifactId() ); - assertEquals( "1.0", model.getVersion() ); - model = (Artifact) itr.next(); - assertEquals( "org.apache.testgroup", model.getGroupId() ); - assertEquals( "discovery", model.getArtifactId() ); - assertEquals( "1.0", model.getVersion() ); - } - - public void testShortPath() - throws ComponentLookupException - { - try - { - discoverer.buildArtifact( "invalid/invalid-1.0.jar" ); - - fail( "Artifact should be null for short paths" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testWrongArtifactId() - throws ComponentLookupException - { - - try - { - discoverer.buildArtifact( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" ); - - fail( "Artifact should be null for wrong ArtifactId" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testNoType() - throws ComponentLookupException - { - try - { - discoverer.buildArtifact( "invalid/invalid/1/invalid-1" ); - - fail( "Artifact should be null for no type" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testWrongVersion() - throws ComponentLookupException - { - try - { - discoverer.buildArtifact( "invalid/invalid/1.0/invalid-2.0.jar" ); - - fail( "Artifact should be null for wrong version" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testLongVersion() - throws ComponentLookupException - { - try - { - discoverer.buildArtifact( "invalid/invalid/1.0/invalid-1.0b.jar" ); - - fail( "Artifact should be null for long version" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testWrongSnapshotVersion() - throws ComponentLookupException - { - try - { - discoverer.buildArtifact( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" ); - - fail( "Artifact should be null for wrong snapshot version" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testSnapshotBaseVersion() - throws ComponentLookupException - { - try - { - discoverer.buildArtifact( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" ); - - fail( "Artifact should be null for snapshot base version" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testPathWithClassifier() - throws ComponentLookupException, DiscovererException - { - String testPath = "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ), artifact ); - } - - public void 
testWithJavaSourceInclusion() - throws ComponentLookupException, DiscovererException - { - String testPath = "org/apache/maven/testing/1.0/testing-1.0-sources.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ), artifact ); - } - - public void testDistributionArtifacts() - throws ComponentLookupException, DiscovererException - { - String testPath = "org/apache/maven/testing/1.0/testing-1.0.tar.gz"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ), artifact ); - - testPath = "org/apache/maven/testing/1.0/testing-1.0.zip"; - - artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ), artifact ); - } - - public void testSnapshot() - throws ComponentLookupException, DiscovererException - { - String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ), artifact ); - - testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar"; - - artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ), artifact ); - } - - public void testNormal() - throws ComponentLookupException, DiscovererException - { - String testPath = "javax/sql/jdbc/2.0/jdbc-2.0.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact ); - } - - public void testSnapshotWithClassifier() - throws ComponentLookupException, DiscovererException - { - String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ), - artifact ); - } - - private List discoverArtifactsWithSnapshots() - throws DiscovererException - { - return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() ); - } - - private List discoverArtifactsWithBlacklist( List list ) - throws DiscovererException - { - return discoverer.discoverArtifacts( repository, list, new SnapshotArtifactFilter() ); - } - - private List discoverArtifacts() - throws DiscovererException - { - return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() ); - } -} diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultDiscovererTest.java new file mode 100644 index 000000000..f105fc62b --- /dev/null +++ b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultDiscovererTest.java @@ -0,0 +1,169 @@ +package org.apache.maven.archiva.discoverer; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.commons.lang.StringUtils; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.artifact.repository.ArtifactRepository; +import org.codehaus.plexus.logging.Logger; +import org.codehaus.plexus.logging.console.ConsoleLogger; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +/** + * DefaultDiscovererTest + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class DefaultDiscovererTest + extends AbstractDiscovererTestCase +{ + private MockConsumer createAndAddMockConsumer( List consumers, String includePattern, String excludePattern ) + { + MockConsumer mockConsumer = new MockConsumer(); + mockConsumer.getIncludePatterns().add( includePattern ); + if ( StringUtils.isNotBlank( excludePattern ) ) + { + mockConsumer.getExcludePatterns().add( excludePattern ); + } + consumers.add( mockConsumer ); + return mockConsumer; + } + + private void assertFilesProcessed( int expectedFileCount, DiscovererStatistics stats, MockConsumer mockConsumer ) + { + assertNotNull( "Stats should not be null.", stats ); + assertNotNull( "MockConsumer should not be null.", mockConsumer ); + assertNotNull( "MockConsumer.filesProcessed should not be null.", mockConsumer.getFilesProcessed() ); + + if ( stats.getFilesConsumed() != mockConsumer.getFilesProcessed().size() ) + { + fail( "Somehow, the stats count of files consumed, and the count of actual files " + + "processed by the consumer do not match." 
); + } + + int actualFileCount = mockConsumer.getFilesProcessed().size(); + + if ( expectedFileCount != actualFileCount ) + { + stats.dump( new ConsoleLogger( Logger.LEVEL_DEBUG, "test" ) ); + System.out.println( "Base Dir:" + stats.getRepository().getBasedir() ); + int num = 0; + Iterator it = mockConsumer.getFilesProcessed().iterator(); + while ( it.hasNext() ) + { + BaseFile file = (BaseFile) it.next(); + System.out.println( " Processed File [" + num + "]: " + file.getRelativePath() ); + num++; + } + + fail( "Files Processed mismatch: expected:<" + expectedFileCount + ">, actual:<" + actualFileCount + ">" ); + } + } + + public void testLegacyLayoutRepositoryAll() + throws Exception + { + ArtifactRepository repository = getLegacyRepository(); + List consumers = new ArrayList(); + MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*", null ); + + DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true ); + + assertNotNull( stats ); + + assertFilesProcessed( 16, stats, mockConsumer ); + } + + public void testDefaultLayoutRepositoryAll() + throws Exception + { + ArtifactRepository repository = getDefaultRepository(); + List consumers = new ArrayList(); + MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*", null ); + + DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true ); + + assertNotNull( stats ); + + assertFilesProcessed( 42, stats, mockConsumer ); + } + + public void testDefaultLayoutRepositoryPomsOnly() + throws Exception + { + ArtifactRepository repository = getDefaultRepository(); + List consumers = new ArrayList(); + MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.pom", null ); + + DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true ); + + assertNotNull( stats ); + + assertFilesProcessed( 10, stats, mockConsumer ); + } + + public void testDefaultLayoutRepositoryJarsOnly() + throws Exception + { + ArtifactRepository repository = getDefaultRepository(); + List consumers = new ArrayList(); + MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null ); + + DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, true ); + + assertNotNull( stats ); + + assertFilesProcessed( 17, stats, mockConsumer ); + } + + public void testDefaultLayoutRepositoryJarsNoSnapshots() + throws Exception + { + ArtifactRepository repository = getDefaultRepository(); + List consumers = new ArrayList(); + MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null ); + + DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, false ); + + assertNotNull( stats ); + + assertFilesProcessed( 13, stats, mockConsumer ); + } + + public void testDefaultLayoutRepositoryJarsNoSnapshotsWithExclusions() + throws Exception + { + ArtifactRepository repository = getDefaultRepository(); + List consumers = new ArrayList(); + MockConsumer mockConsumer = createAndAddMockConsumer( consumers, "**/*.jar", null ); + + List exclusions = new ArrayList(); + exclusions.add( "**/*-client.jar" ); + DiscovererStatistics stats = discoverer.walkRepository( repository, consumers, false, 0, exclusions, null ); + + assertNotNull( stats ); + + assertFilesProcessed( 12, stats, mockConsumer ); + } +} diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java 
b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java deleted file mode 100644 index 50ac62bfd..000000000 --- a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/DefaultMetadataDiscovererTest.java +++ /dev/null @@ -1,199 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.factory.ArtifactFactory; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.repository.ArtifactRepositoryFactory; -import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; -import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata; -import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata; -import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; -import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata; -import org.codehaus.plexus.PlexusTestCase; - -import java.io.File; -import java.io.IOException; -import java.util.Iterator; -import java.util.List; - -/** - * This class tests the DefaultMetadataDiscoverer class. - */ -public class DefaultMetadataDiscovererTest - extends PlexusTestCase -{ - private MetadataDiscoverer discoverer; - - private static final String TEST_OPERATION = "test"; - - private ArtifactRepository repository; - - private ArtifactFactory factory; - - /** - * - */ - public void setUp() - throws Exception - { - super.setUp(); - - discoverer = (MetadataDiscoverer) lookup( MetadataDiscoverer.ROLE, "default" ); - - factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE ); - - repository = getRepository(); - - removeTimestampMetadata(); - } - - protected ArtifactRepository getRepository() - throws Exception - { - File basedir = getTestFile( "src/test/repository" ); - - ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE ); - - ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" ); - - return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null ); - } - - /** - * - */ - public void tearDown() - throws Exception - { - super.tearDown(); - discoverer = null; - } - - /** - * Test if metadata file in wrong directory was added to the kickedOutPaths. 
- */ - public void testKickoutWrongDirectory() - throws DiscovererException - { - discoverer.discoverMetadata( repository, null ); - Iterator iter = discoverer.getKickedOutPathsIterator(); - boolean found = false; - while ( iter.hasNext() && !found ) - { - DiscovererPath dPath = (DiscovererPath) iter.next(); - String dir = dPath.getPath(); - - String normalizedDir = dir.replace( '\\', '/' ); - if ( "javax/maven-metadata.xml".equals( normalizedDir ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Unable to build a repository metadata from path", - dPath.getComment() ); - } - } - assertTrue( found ); - } - - /** - * Test if blank metadata file was added to the kickedOutPaths. - */ - public void testKickoutBlankMetadata() - throws DiscovererException - { - discoverer.discoverMetadata( repository, null ); - Iterator iter = discoverer.getKickedOutPathsIterator(); - boolean found = false; - while ( iter.hasNext() && !found ) - { - DiscovererPath dPath = (DiscovererPath) iter.next(); - String dir = dPath.getPath(); - - String normalizedDir = dir.replace( '\\', '/' ); - if ( "org/apache/maven/some-ejb/1.0/maven-metadata.xml".equals( normalizedDir ) ) - { - found = true; - assertTrue( "Check reason for kickout", dPath.getComment().matches( - "Error reading metadata file '(.*)': input contained no data" ) ); - } - } - assertTrue( found ); - } - - private void removeTimestampMetadata() - throws IOException - { - // remove the metadata that tracks time - File file = new File( repository.getBasedir(), "maven-metadata.xml" ); - System.gc(); // for Windows - file.delete(); - assertFalse( file.exists() ); - } - - public void testDiscoverMetadata() - throws DiscovererException - { - List metadataPaths = discoverer.discoverMetadata( repository, null ); - assertNotNull( "Check metadata not null", metadataPaths ); - - RepositoryMetadata metadata = - new ArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery" ) ); - assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) ); - - metadata = - new SnapshotArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery", "1.0" ) ); - assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) ); - - metadata = new GroupRepositoryMetadata( "org.apache.maven" ); - assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) ); - } - - protected Artifact createArtifact( String groupId, String artifactId ) - { - return createArtifact( groupId, artifactId, "1.0" ); - } - - private Artifact createArtifact( String groupId, String artifactId, String version ) - { - return factory.createArtifact( groupId, artifactId, version, null, "jar" ); - } - - private boolean containsMetadata( List metadataPaths, RepositoryMetadata metadata ) - { - for ( Iterator i = metadataPaths.iterator(); i.hasNext(); ) - { - RepositoryMetadata m = (RepositoryMetadata) i.next(); - - if ( m.getGroupId().equals( metadata.getGroupId() ) ) - { - if ( m.getArtifactId() == null && metadata.getArtifactId() == null ) - { - return true; - } - else if ( m.getArtifactId() != null && m.getArtifactId().equals( metadata.getArtifactId() ) ) - { - return true; - } - } - } - return false; - } -} diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java deleted file mode 100644 index d0b245474..000000000 --- 
a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/LegacyArtifactDiscovererTest.java +++ /dev/null @@ -1,537 +0,0 @@ -package org.apache.maven.archiva.discoverer; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter; -import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter; -import org.apache.maven.artifact.Artifact; -import org.codehaus.plexus.component.repository.exception.ComponentLookupException; - -import java.io.File; -import java.net.MalformedURLException; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; - -/** - * Test the legacy artifact discoverer. - * - * @author Brett Porter - * @version $Id:LegacyArtifactDiscovererTest.java 437105 2006-08-26 17:22:22 +1000 (Sat, 26 Aug 2006) brett $ - */ -public class LegacyArtifactDiscovererTest - extends AbstractArtifactDiscovererTest -{ - private static final List JAVAX_SQL_BLACKLIST = Collections.singletonList( "javax.sql/**" ); - - protected String getLayout() - { - return "legacy"; - } - - protected File getRepositoryFile() - { - return getTestFile( "src/test/legacy-repository" ); - } - - public void testDefaultExcludes() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( path.indexOf( "CVS" ) >= 0 ) - { - found = true; - assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() ); - } - } - assertTrue( "Check exclusion was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not CVS", a.getFile().getPath().indexOf( "CVS" ) >= 0 ); - assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 ); - } - } - - public void testStandardExcludes() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "KEYS".equals( path ) ) - { - found = true; - assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() ); - } - } - assertTrue( "Check exclusion was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 
KEYS", "KEYS".equals( a.getFile().getName() ) ); - } - } - - public void testBlacklistedExclude() - throws DiscovererException - { - List artifacts = discoverArtifactsWithBlacklist(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "javax.sql/jars/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions", - dPath.getComment() ); - } - } - assertTrue( "Check exclusion was found", found ); - - assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) ); - } - - public void testKickoutWithShortPath() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", - "Path does not match a legacy repository path for an artifact", dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithLongPath() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/jars/1.0/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", - "Path does not match a legacy repository path for an artifact", dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithInvalidType() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/foo/invalid-1.0.foo".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Path artifact type does not corresspond to an artifact type", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not invalid-1.0.foo", "invalid-1.0.foo".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithNoExtension() - throws DiscovererException - { - List artifacts = 
discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/jars/no-extension".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Path filename does not have an extension", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 'no-extension'", "no-extension".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithWrongExtension() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/jars/invalid-1.0.rar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Path type does not match the extension", - dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 'invalid-1.0.rar'", "invalid-1.0.rar".equals( a.getFile().getName() ) ); - } - } - - public void testKickoutWithNoVersion() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - boolean found = false; - for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; ) - { - DiscovererPath dPath = (DiscovererPath) i.next(); - - String path = dPath.getPath(); - - if ( "invalid/jars/invalid.jar".equals( path.replace( '\\', '/' ) ) ) - { - found = true; - assertEquals( "Check reason for kickout", "Path filename version is empty", dPath.getComment() ); - } - } - assertTrue( "Check kickout was found", found ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact a = (Artifact) i.next(); - assertFalse( "Check not 'invalid.jar'", "invalid.jar".equals( a.getFile().getName() ) ); - } - } - - public void testInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) ); - } - - public void testTextualVersion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "UNKNOWN" ) ) ); - } - - public void testArtifactWithClassifier() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", - artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) ); - } - - public void testJavaSourcesInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", 
artifacts ); - - assertTrue( "Check normal included", artifacts.contains( - createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) ); - } - - public void testDistributionInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check zip included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) ); - - assertTrue( "Check tar.gz included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) ); - } - - public void testSnapshotInclusion() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) ); - assertTrue( "Check snapshot included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) ); - } - - public void testSnapshotExclusion() - throws DiscovererException - { - List artifacts = discoverArtifacts(); - assertNotNull( "Check artifacts not null", artifacts ); - - assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) ); - assertFalse( "Check snapshot included", - artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) ); - } - - public void testFileSet() - throws DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact artifact = (Artifact) i.next(); - assertNotNull( "Check file is set", artifact.getFile() ); - } - } - - public void testRepositorySet() - throws MalformedURLException, DiscovererException - { - List artifacts = discoverArtifactsWithSnapshots(); - assertNotNull( "Check artifacts not null", artifacts ); - - String url = repository.getUrl(); - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact artifact = (Artifact) i.next(); - assertNotNull( "Check repository set", artifact.getRepository() ); - assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() ); - } - } - - public void testWrongArtifactPackaging() - throws ComponentLookupException, DiscovererException - { - try - { - discoverer.buildArtifact( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" ); - - fail( "Artifact should be null for wrong package extension" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testNoArtifactId() - throws DiscovererException - { - try - { - discoverer.buildArtifact( "groupId/jars/-1.0.jar" ); - - fail( "Artifact should be null when artifactId is missing" ); - } - catch ( DiscovererException e ) - { - // excellent - } - - try - { - discoverer.buildArtifact( "groupId/jars/1.0.jar" ); - - fail( "Artifact should be null when artifactId is missing" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testNoType() - throws ComponentLookupException, DiscovererException - { - try - { - discoverer.buildArtifact( "invalid/invalid/1/invalid-1" ); - - fail( "Artifact should be null for no type" ); - } - catch ( DiscovererException e ) - { - // excellent - } - } - - public void testSnapshot() - throws ComponentLookupException, DiscovererException - { - String testPath = 
"org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT" ), artifact ); - } - - public void testFinal() - throws ComponentLookupException, DiscovererException - { - String testPath = "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606" ), artifact ); - } - - public void testNormal() - throws ComponentLookupException, DiscovererException - { - String testPath = "javax.sql/jars/jdbc-2.0.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact ); - } - - public void testJavadoc() - throws ComponentLookupException, DiscovererException - { - String testPath = "javax.sql/javadoc.jars/jdbc-2.0-javadoc.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "javadoc.jar", "javadoc" ), artifact ); - } - - public void testSources() - throws ComponentLookupException, DiscovererException - { - String testPath = "javax.sql/java-sources/jdbc-2.0-sources.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "javax.sql", "jdbc", "2.0", "java-source", "sources" ), artifact ); - } - - public void testPlugin() - throws ComponentLookupException, DiscovererException - { - String testPath = "maven/plugins/maven-test-plugin-1.8.jar"; - - Artifact artifact = discoverer.buildArtifact( testPath ); - - assertEquals( createArtifact( "maven", "maven-test-plugin", "1.8", "plugin" ), artifact ); - } - - - private List discoverArtifacts() - throws DiscovererException - { - return discoverer.discoverArtifacts( repository, null, new SnapshotArtifactFilter() ); - } - - private List discoverArtifactsWithBlacklist() - throws DiscovererException - { - return discoverer.discoverArtifacts( repository, JAVAX_SQL_BLACKLIST, new SnapshotArtifactFilter() ); - } - - private List discoverArtifactsWithSnapshots() - throws DiscovererException - { - return discoverer.discoverArtifacts( repository, null, new AcceptAllArtifactFilter() ); - } -} diff --git a/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/MockConsumer.java b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/MockConsumer.java new file mode 100644 index 000000000..7f081c955 --- /dev/null +++ b/archiva-discoverer/src/test/java/org/apache/maven/archiva/discoverer/MockConsumer.java @@ -0,0 +1,75 @@ +/** + * + */ +package org.apache.maven.archiva.discoverer; + +import org.apache.maven.archiva.common.consumers.Consumer; +import org.apache.maven.archiva.common.consumers.ConsumerException; +import org.apache.maven.archiva.common.utils.BaseFile; +import org.apache.maven.artifact.repository.ArtifactRepository; + +import java.util.ArrayList; +import java.util.List; + +public class MockConsumer + implements Consumer +{ + private List excludePatterns = new ArrayList(); + + private List includePatterns = new ArrayList(); + + private List filesProcessed = new ArrayList(); + + private int countFileProblems = 0; + + public String getName() + { + return "MockConsumer (Testing Only)"; + } + + public boolean init( ArtifactRepository repository ) + { + return true; + } + + public void processFile( BaseFile file ) + throws 
ConsumerException + { + filesProcessed.add( file ); + } + + public void processFileProblem( BaseFile file, String message ) + { + countFileProblems++; + } + + public List getExcludePatterns() + { + return excludePatterns; + } + + public void setExcludePatterns( List excludePatterns ) + { + this.excludePatterns = excludePatterns; + } + + public List getIncludePatterns() + { + return includePatterns; + } + + public void setIncludePatterns( List includePatterns ) + { + this.includePatterns = includePatterns; + } + + public int getCountFileProblems() + { + return countFileProblems; + } + + public List getFilesProcessed() + { + return filesProcessed; + } +} \ No newline at end of file diff --git a/archiva-discoverer/src/test/repository/javax/maven-metadata.xml b/archiva-discoverer/src/test/repository/javax/maven-metadata.xml index caf5b6697..b3baf545d 100644 --- a/archiva-discoverer/src/test/repository/javax/maven-metadata.xml +++ b/archiva-discoverer/src/test/repository/javax/maven-metadata.xml @@ -18,6 +18,7 @@ ~ under the License. --> + javax.sql jdbc diff --git a/archiva-indexer/pom.xml b/archiva-indexer/pom.xml index 4f004c5a1..10d2c366e 100644 --- a/archiva-indexer/pom.xml +++ b/archiva-indexer/pom.xml @@ -27,7 +27,7 @@ 4.0.0 archiva-indexer - Archiva Repository Indexer + Archiva Indexer org.apache.maven diff --git a/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryArtifactIndex.java b/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryArtifactIndex.java index 9535a6c70..b4bdbd4f7 100644 --- a/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryArtifactIndex.java +++ b/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/RepositoryArtifactIndex.java @@ -21,6 +21,7 @@ package org.apache.maven.archiva.indexer; import org.apache.maven.archiva.indexer.query.Query; import org.apache.maven.archiva.indexer.record.RepositoryIndexRecordFactory; +import org.apache.maven.artifact.Artifact; import java.util.Collection; import java.util.List; @@ -89,6 +90,17 @@ public interface RepositoryArtifactIndex Collection getAllRecordKeys() throws RepositoryIndexException; + /** + * Indexes the artifact specified. If the artifact is already in the repository they it is updated. + * This method should use less memory than indexRecords as the records can be created and disposed of on the fly. + * + * @param artifact the artifact to index + * @param factory the artifact to record factory + * @throws RepositoryIndexException if there is a problem indexing the artifacts + */ + void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory ) + throws RepositoryIndexException; + /** * Indexes the artifacts found within the specified list. If the artifacts are already in the * repository they are updated. 
This method should use less memory than indexRecords as the records can be diff --git a/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryArtifactIndex.java b/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryArtifactIndex.java index 5b5f68bea..c0a02935d 100644 --- a/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryArtifactIndex.java +++ b/archiva-indexer/src/main/java/org/apache/maven/archiva/indexer/lucene/LuceneRepositoryArtifactIndex.java @@ -351,6 +351,40 @@ public class LuceneRepositoryArtifactIndex lastUpdatedTime = System.currentTimeMillis(); } } + + public void indexArtifact( Artifact artifact, RepositoryIndexRecordFactory factory ) + throws RepositoryIndexException + { + IndexModifier indexModifier = null; + try + { + indexModifier = new IndexModifier( indexLocation, getAnalyzer(), !exists() ); + + RepositoryIndexRecord record = factory.createRecord( artifact ); + + if ( record != null ) + { + Term term = new Term( FLD_PK, record.getPrimaryKey() ); + + indexModifier.deleteDocuments( term ); + + Document document = converter.convert( record ); + document.add( new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) ); + + indexModifier.addDocument( document ); + } + indexModifier.optimize(); + } + catch ( IOException e ) + { + throw new RepositoryIndexException( "Error updating index: " + e.getMessage(), e ); + } + finally + { + closeQuietly( indexModifier ); + lastUpdatedTime = System.currentTimeMillis(); + } + } public List getAllGroupIds() throws RepositoryIndexException diff --git a/archiva-proxy/pom.xml b/archiva-proxy/pom.xml index 8ab966dde..23488ee0a 100644 --- a/archiva-proxy/pom.xml +++ b/archiva-proxy/pom.xml @@ -31,11 +31,7 @@ org.apache.maven.archiva - archiva-discoverer - - - org.apache.maven - maven-artifact + archiva-common org.apache.maven.wagon diff --git a/archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultProxyRequestHandler.java b/archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultProxyRequestHandler.java index f4a77f0c7..a22fdb872 100644 --- a/archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultProxyRequestHandler.java +++ b/archiva-proxy/src/main/java/org/apache/maven/archiva/proxy/DefaultProxyRequestHandler.java @@ -21,8 +21,8 @@ package org.apache.maven.archiva.proxy; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.apache.maven.archiva.discoverer.ArtifactDiscoverer; -import org.apache.maven.archiva.discoverer.DiscovererException; +import org.apache.maven.archiva.common.artifact.builder.BuilderException; +import org.apache.maven.archiva.common.artifact.builder.LayoutArtifactBuilder; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.factory.ArtifactFactory; import org.apache.maven.artifact.repository.ArtifactRepository; @@ -85,17 +85,17 @@ public class DefaultProxyRequestHandler * @plexus.requirement */ private ArtifactFactory factory; - + /** * @plexus.requirement role-hint="default" - * @todo use a map, and have priorities in them + * @todo use a map, and have priorities in them. 
*/ - private ArtifactDiscoverer defaultArtifactDiscoverer; - + private LayoutArtifactBuilder defaultArtifactBuilder; + /** * @plexus.requirement role-hint="legacy" */ - private ArtifactDiscoverer legacyArtifactDiscoverer; + private LayoutArtifactBuilder legacyArtifactBuilder; /** * @plexus.requirement role="org.apache.maven.wagon.Wagon" @@ -159,10 +159,10 @@ public class DefaultProxyRequestHandler Artifact artifact = null; try { - artifact = defaultArtifactDiscoverer.buildArtifact( artifactPath ); + artifact = defaultArtifactBuilder.build( artifactPath ); getLogger().debug( "Artifact requested is: " + artifact ); } - catch ( DiscovererException e ) + catch ( BuilderException e ) { msg = "Failed to build artifact from path:\n\tfrom default: " + e.getMessage(); } @@ -171,10 +171,10 @@ public class DefaultProxyRequestHandler { try { - artifact = legacyArtifactDiscoverer.buildArtifact( artifactPath ); + artifact = legacyArtifactBuilder.build( artifactPath ); getLogger().debug( "Artifact requested is: " + artifact ); } - catch ( DiscovererException e ) + catch ( BuilderException e ) { getLogger().debug( msg + "\n\tfrom legacy: " + e.getMessage() ); } diff --git a/archiva-proxy/src/test/resources/org/apache/maven/archiva/proxy/ProxyRequestHandlerTest.xml b/archiva-proxy/src/test/resources/org/apache/maven/archiva/proxy/ProxyRequestHandlerTest.xml index 37c1a1e53..3e8ac0201 100644 --- a/archiva-proxy/src/test/resources/org/apache/maven/archiva/proxy/ProxyRequestHandlerTest.xml +++ b/archiva-proxy/src/test/resources/org/apache/maven/archiva/proxy/ProxyRequestHandlerTest.xml @@ -28,7 +28,6 @@ org.codehaus.plexus.logging.LoggerManager org.codehaus.plexus.logging.console.ConsoleLoggerManager basic - ERROR diff --git a/archiva-reports-standard/pom.xml b/archiva-reports-standard/pom.xml index 2f0dd0644..24eb19954 100755 --- a/archiva-reports-standard/pom.xml +++ b/archiva-reports-standard/pom.xml @@ -77,6 +77,41 @@ commons-io commons-io + + org.codehaus.plexus + plexus-jdo2 + 1.0-alpha-8 + + + xerces + xercesImpl + + + xerces + xmlParserAPIs + + + + + jpox + jpox + 1.1.6 + compile + + + + javax.sql + jdbc-stdext + + + + + + hsqldb + hsqldb + 1.7.3.3 + test + @@ -84,19 +119,45 @@ org.codehaus.modello modello-maven-plugin 1.0-alpha-14-SNAPSHOT + + 1.0.0 + false + src/main/mdo/reporting.mdo + + modello-java - xpp3-writer java + jpox-metadata-class + + + + + jpox-jdo-mapping + + jpox-jdo-mapping + + + ${basedir}/target/classes/org/apache/maven/archiva/reporting/model/ + + + + + + org.codehaus.mojo + jpox-maven-plugin + 1.1.6-SNAPSHOT + + + + enhance - - 1.0.0 - src/main/mdo/reporting.mdo - org.codehaus.mojo diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java new file mode 100644 index 000000000..2854befa7 --- /dev/null +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/ReportingException.java @@ -0,0 +1,50 @@ +package org.apache.maven.archiva.reporting; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
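Illustrative sketch only, not part of the commit: the default-then-legacy fallback that DefaultProxyRequestHandler now performs with the two LayoutArtifactBuilder components could be summarised by a hypothetical helper like the one below; the method name and the null return are assumptions, while build( String ) and BuilderException come from the hunk above.

    // Hypothetical sketch of the layout-builder fallback used when resolving a requested path.
    private Artifact buildArtifactFromPath( String artifactPath )
    {
        try
        {
            // Try the default (Maven 2) repository layout first.
            return defaultArtifactBuilder.build( artifactPath );
        }
        catch ( BuilderException e )
        {
            try
            {
                // Fall back to the legacy (Maven 1) repository layout.
                return legacyArtifactBuilder.build( artifactPath );
            }
            catch ( BuilderException e2 )
            {
                // Neither layout recognised the requested path.
                return null;
            }
        }
    }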
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * ReportingException + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ReportingException + extends Exception +{ + + public ReportingException() + { + } + + public ReportingException( String message ) + { + super( message ); + } + + public ReportingException( Throwable cause ) + { + super( cause ); + } + + public ReportingException( String message, Throwable cause ) + { + super( message, cause ); + } +} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java new file mode 100644 index 000000000..f474ef664 --- /dev/null +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractJdoDatabase.java @@ -0,0 +1,237 @@ +package org.apache.maven.archiva.reporting.database; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.codehaus.plexus.jdo.JdoFactory; +import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; +import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; + +import java.util.List; + +import javax.jdo.Extent; +import javax.jdo.JDOException; +import javax.jdo.JDOHelper; +import javax.jdo.JDOObjectNotFoundException; +import javax.jdo.JDOUserException; +import javax.jdo.PersistenceManager; +import javax.jdo.PersistenceManagerFactory; +import javax.jdo.Query; +import javax.jdo.Transaction; + +/** + * AbstractJdoResults - Base class for all JDO related results. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class AbstractJdoDatabase + implements Initializable +{ + /** + * @plexus.requirement role-hint="archiva" + */ + private JdoFactory jdoFactory; + + private PersistenceManagerFactory pmf; + + // ------------------------------------------------------------------- + // JPOX / JDO Specifics. 
+ // ------------------------------------------------------------------- + + protected List getAllObjects( Class clazz, String ordering ) + { + PersistenceManager pm = getPersistenceManager(); + Transaction tx = pm.currentTransaction(); + + try + { + tx.begin(); + + Extent extent = pm.getExtent( clazz, true ); + + Query query = pm.newQuery( extent ); + + if ( ordering != null ) + { + query.setOrdering( ordering ); + } + +// for ( Iterator i = fetchGroups.iterator(); i.hasNext(); ) +// { +// pm.getFetchPlan().addGroup( (String) i.next() ); +// } + + List result = (List) query.execute(); + + result = (List) pm.detachCopyAll( result ); + + tx.commit(); + + return result; + } + finally + { + rollbackIfActive( tx ); + } + } + + protected Object getObjectByKey( Class clazz, Object key ) + throws JDOObjectNotFoundException, JDOException + { + if ( key == null ) + { + throw new JDOException( "Unable to get object from jdo using null key." ); + } + + PersistenceManager pm = getPersistenceManager(); + Transaction tx = pm.currentTransaction(); + + try + { + tx.begin(); + + // if ( fetchGroup != null ) + // { + // pm.getFetchPlan().addGroup( fetchGroup ); + // } + + Object objectId = pm.newObjectIdInstance( clazz, key.toString() ); + + Object object = pm.getObjectById( objectId ); + + object = pm.detachCopy( object ); + + tx.commit(); + + return object; + } + finally + { + rollbackIfActive( tx ); + } + } + + public void initialize() + throws InitializationException + { + pmf = jdoFactory.getPersistenceManagerFactory(); + } + + protected void removeObject( Object o ) + { + PersistenceManager pm = getPersistenceManager(); + Transaction tx = pm.currentTransaction(); + + try + { + tx.begin(); + + o = pm.getObjectById( pm.getObjectId( o ) ); + + pm.deletePersistent( o ); + + tx.commit(); + } + finally + { + rollbackIfActive( tx ); + } + } + + protected Object saveObject( Object object ) + { + return saveObject( object, null ); + } + + protected Object saveObject( Object object, String fetchGroups[] ) + throws JDOException + { + PersistenceManager pm = getPersistenceManager(); + Transaction tx = pm.currentTransaction(); + + try + { + tx.begin(); + + if ( ( JDOHelper.getObjectId( object ) != null ) && !JDOHelper.isDetached( object ) ) + { + throw new JDOException( "Existing object is not detached: " + object ); + } + + if ( fetchGroups != null ) + { + for ( int i = 0; i >= fetchGroups.length; i++ ) + { + pm.getFetchPlan().addGroup( fetchGroups[i] ); + } + } + + pm.makePersistent( object ); + + object = pm.detachCopy( object ); + + tx.commit(); + + return object; + } + finally + { + rollbackIfActive( tx ); + } + } + + protected PersistenceManager getPersistenceManager() + { + PersistenceManager pm = pmf.getPersistenceManager(); + + pm.getFetchPlan().setMaxFetchDepth( -1 ); + + return pm; + } + + protected static void closePersistenceManager( PersistenceManager pm ) + { + try + { + pm.close(); + } + catch ( JDOUserException e ) + { + // ignore + } + } + + protected static void rollbackIfActive( Transaction tx ) + { + PersistenceManager pm = tx.getPersistenceManager(); + + try + { + if ( tx.isActive() ) + { + tx.rollback(); + } + } + finally + { + closePersistenceManager( pm ); + } + } +} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java new file mode 100644 index 000000000..0eee93b4f --- /dev/null +++ 
b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/AbstractResultsDatabase.java @@ -0,0 +1,80 @@ +package org.apache.maven.archiva.reporting.database; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.maven.archiva.reporting.model.Result; + +/** + * AbstractResultsDatabase + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public abstract class AbstractResultsDatabase + extends AbstractJdoDatabase +{ + /** + *
+ * Get the number of failures in the database. + *
+ * + *
+ * WARNING: This is a very resource intensive request. Use sparingly. + *
+ * + * @return the number of failures in the database. + */ + public abstract int getNumFailures(); + + /** + *
+ * Get the number of warnings in the database. + *
+ * + *
+ * WARNING: This is a very resource intensive request. Use sparingly. + *
+ * + * @return the number of warnings in the database. + */ + public abstract int getNumWarnings(); + + /** + *
+ * Get the number of notices in the database. + *
+ * + *
+ * WARNING: This is a very resource intensive request. Use sparingly. + *
+ * + * @return the number of notices in the database. + */ + public abstract int getNumNotices(); + + protected static Result createResult( String processor, String problem, String reason ) + { + Result result = new Result(); + result.setProcessor( processor ); + result.setProblem( problem ); + result.setReason( reason ); + return result; + } +} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java new file mode 100644 index 000000000..4314372b0 --- /dev/null +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ArtifactResultsDatabase.java @@ -0,0 +1,282 @@ +package org.apache.maven.archiva.reporting.database; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.commons.lang.StringUtils; +import org.apache.maven.archiva.reporting.model.ArtifactResults; +import org.apache.maven.archiva.reporting.model.ArtifactResultsKey; +import org.apache.maven.archiva.reporting.model.Result; +import org.apache.maven.artifact.Artifact; + +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import javax.jdo.JDOObjectNotFoundException; +import javax.jdo.PersistenceManager; +import javax.jdo.Query; +import javax.jdo.Transaction; + +/** + * ArtifactResultsDatabase - Database of ArtifactResults. + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase" + */ +public class ArtifactResultsDatabase + extends AbstractResultsDatabase +{ + // ------------------------------------------------------------------- + // ArtifactResults methods. 
+ // ------------------------------------------------------------------- + + public static final String ROLE = ArtifactResultsDatabase.class.getName(); + + public void addFailure( Artifact artifact, String processor, String problem, String reason ) + { + ArtifactResults results = getArtifactResults( artifact ); + Result result = createResult( processor, problem, reason ); + + if ( !results.getFailures().contains( result ) ) + { + results.addFailure( result ); + } + + saveObject( results ); + } + + public void addNotice( Artifact artifact, String processor, String problem, String reason ) + { + ArtifactResults results = getArtifactResults( artifact ); + Result result = createResult( processor, problem, reason ); + + if ( !results.getNotices().contains( result ) ) + { + results.addNotice( result ); + } + + saveObject( results ); + } + + public void addWarning( Artifact artifact, String processor, String problem, String reason ) + { + ArtifactResults results = getArtifactResults( artifact ); + Result result = createResult( processor, problem, reason ); + + if ( !results.getWarnings().contains( result ) ) + { + results.addWarning( result ); + } + + saveObject( results ); + } + + public void clearResults( ArtifactResults results ) + { + results.getFailures().clear(); + results.getWarnings().clear(); + results.getNotices().clear(); + + saveObject( results ); + } + + public List getAllArtifactResults() + { + return getAllObjects( ArtifactResults.class, null ); + } + + public Iterator getIterator() + { + List allartifacts = getAllArtifactResults(); + if ( allartifacts == null ) + { + return Collections.EMPTY_LIST.iterator(); + } + + return allartifacts.iterator(); + } + + public List findArtifactResults( String groupId, String artifactId, String version ) + { + PersistenceManager pm = getPersistenceManager(); + Transaction tx = pm.currentTransaction(); + + try + { + tx.begin(); + + Query query = pm.newQuery( "javax.jdo.query.JDOQL", "SELECT FROM " + ArtifactResults.class.getName() + + " WHERE groupId == findGroupId && " + " artifactId == findArtifactId && " + + " version == findVersionId" ); + query.declareParameters( "String findGroupId, String findArtifactId, String findVersionId" ); + query.setOrdering( "findArtifactId ascending" ); + + List result = (List) query.execute( groupId, artifactId, version ); + + result = (List) pm.detachCopyAll( result ); + + tx.commit(); + + return result; + } + finally + { + rollbackIfActive( tx ); + } + } + + public void remove( ArtifactResults results ) + { + removeObject( results ); + } + + public void remove( Artifact artifact ) + { + try + { + ArtifactResults results = lookupArtifactResults( artifact ); + remove( results ); + } + catch ( JDOObjectNotFoundException e ) + { + // nothing to do. + } + } + + /** + * Get an {@link ArtifactResults} from the store. + * If the store does not have one, create it. + * + * Equivalent to calling {@link #lookupArtifactResults(Artifact)} then if + * not found, using {@link #createArtifactResults(Artifact)}. 
+ * + * @param artifact the artifact information + * @return the ArtifactResults object (may not be in database yet, so don't forget to {@link #saveObject(Object)}) + * @see #lookupArtifactResults(Artifact) + * @see #createArtifactResults(Artifact) + */ + public ArtifactResults getArtifactResults( Artifact artifact ) + { + ArtifactResults results; + + try + { + results = lookupArtifactResults( artifact ); + } + catch ( JDOObjectNotFoundException e ) + { + results = createArtifactResults( artifact ); + } + + return results; + } + + /** + * Create a new {@link ArtifactResults} object from the provided Artifact information. + * + * @param artifact the artifact information. + * @return the new {@link ArtifactResults} object. + * @see #getArtifactResults(Artifact) + * @see #lookupArtifactResults(Artifact) + */ + private ArtifactResults createArtifactResults( Artifact artifact ) + { + /* The funky StringUtils.defaultString() is used because of database constraints. + * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version, + * type, classifier. + * This also means that none of those fields may be null. however, that doesn't eliminate the + * ability to have an empty string in place of a null. + */ + + ArtifactResults results = new ArtifactResults(); + results.setGroupId( StringUtils.defaultString( artifact.getGroupId() ) ); + results.setArtifactId( StringUtils.defaultString( artifact.getArtifactId() ) ); + results.setVersion( StringUtils.defaultString( artifact.getVersion() ) ); + results.setType( StringUtils.defaultString( artifact.getType() ) ); + results.setClassifier( StringUtils.defaultString( artifact.getClassifier() ) ); + + return results; + } + + /** + * Lookup the {@link ArtifactResults} in the JDO store from the information in + * the provided Artifact. + * + * @param artifact the artifact information. + * @return the previously saved {@link ArtifactResults} from the JDO store. + * @throws JDOObjectNotFoundException if the {@link ArtifactResults} are not found. + * @see #getArtifactResults(Artifact) + * @see #createArtifactResults(Artifact) + */ + private ArtifactResults lookupArtifactResults( Artifact artifact ) + throws JDOObjectNotFoundException + { + /* The funky StringUtils.defaultString() is used because of database constraints. + * The ArtifactResults object has a complex primary key consisting of groupId, artifactId, version, + * type, classifier. + * This also means that none of those fields may be null. however, that doesn't eliminate the + * ability to have an empty string in place of a null. 
+ */ + + ArtifactResultsKey key = new ArtifactResultsKey(); + key.groupId = StringUtils.defaultString( artifact.getGroupId() ); + key.artifactId = StringUtils.defaultString( artifact.getArtifactId() ); + key.version = StringUtils.defaultString( artifact.getVersion() ); + key.type = StringUtils.defaultString( artifact.getType() ); + key.classifier = StringUtils.defaultString( artifact.getClassifier() ); + + return (ArtifactResults) getObjectByKey( ArtifactResults.class, key ); + } + + public int getNumFailures() + { + int count = 0; + for ( Iterator it = getIterator(); it.hasNext(); ) + { + ArtifactResults results = (ArtifactResults) it.next(); + count += results.getFailures().size(); + } + return count; + } + + public int getNumNotices() + { + int count = 0; + for ( Iterator it = getIterator(); it.hasNext(); ) + { + ArtifactResults results = (ArtifactResults) it.next(); + count += results.getNotices().size(); + } + return count; + } + + public int getNumWarnings() + { + int count = 0; + for ( Iterator it = getIterator(); it.hasNext(); ) + { + ArtifactResults results = (ArtifactResults) it.next(); + count += results.getWarnings().size(); + } + return count; + } +} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java new file mode 100644 index 000000000..82e62f1ee --- /dev/null +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/MetadataResultsDatabase.java @@ -0,0 +1,209 @@ +package org.apache.maven.archiva.reporting.database; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
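Illustrative sketch only, not part of the commit: a caller of the new ArtifactResultsDatabase might look roughly like the snippet below. The lookup() call (as in a PlexusTestCase), the artifactFactory field and the literal strings are assumptions; addFailure(), getNumFailures() and the ROLE constant come from the class shown above.

    // Hypothetical usage sketch; in a Plexus test case the component could be looked up by its ROLE.
    ArtifactResultsDatabase database = (ArtifactResultsDatabase) lookup( ArtifactResultsDatabase.ROLE );

    Artifact artifact = artifactFactory.createArtifact( "org.apache.maven", "testing", "1.0", null, "jar" );

    // Record a failure against the artifact; addFailure() skips duplicate Result entries.
    database.addFailure( artifact, "checksum", "missing-checksum", "The artifact has no checksum file." );

    // The counting methods iterate every stored ArtifactResults, so use them sparingly.
    assertEquals( 1, database.getNumFailures() );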
+ */ + +import org.apache.commons.lang.StringUtils; +import org.apache.maven.archiva.reporting.model.MetadataResults; +import org.apache.maven.archiva.reporting.model.MetadataResultsKey; +import org.apache.maven.archiva.reporting.model.Result; +import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; + +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import javax.jdo.JDOObjectNotFoundException; + +/** + * MetadataResultsDatabase + * + * @author Joakim Erdfelt + * @version $Id$ + * + * @plexus.component role="org.apache.maven.archiva.reporting.database.MetadataResultsDatabase" + */ +public class MetadataResultsDatabase + extends AbstractResultsDatabase +{ + public static final String ROLE = MetadataResultsDatabase.class.getName(); + + public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason ) + { + MetadataResults results = getMetadataResults( metadata ); + Result result = createResult( processor, problem, reason ); + + if ( !results.getFailures().contains( result ) ) + { + results.addFailure( result ); + } + + saveObject( results ); + } + + public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason ) + { + MetadataResults results = getMetadataResults( metadata ); + Result result = createResult( processor, problem, reason ); + + if ( !results.getWarnings().contains( result ) ) + { + results.addWarning( result ); + } + + saveObject( results ); + } + + public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason ) + { + MetadataResults results = getMetadataResults( metadata ); + Result result = createResult( processor, problem, reason ); + + if ( !results.getNotices().contains( result ) ) + { + results.addNotice( result ); + } + + saveObject( results ); + } + + public void clearResults( MetadataResults results ) + { + results.getFailures().clear(); + results.getWarnings().clear(); + results.getNotices().clear(); + + saveObject( results ); + } + + public List getAllMetadataResults() + { + return getAllObjects( MetadataResults.class, null ); + } + + public Iterator getIterator() + { + List allmetadatas = getAllMetadataResults(); + if ( allmetadatas == null ) + { + return Collections.EMPTY_LIST.iterator(); + } + + return allmetadatas.iterator(); + } + + public void remove( MetadataResults results ) + { + removeObject( results ); + } + + public void remove( RepositoryMetadata metadata ) + { + try + { + MetadataResults results = lookupMetadataResults( metadata ); + remove( results ); + } + catch ( JDOObjectNotFoundException e ) + { + // nothing to do. + } + } + + public MetadataResults getMetadataResults( RepositoryMetadata metadata ) + { + MetadataResults results; + + try + { + results = lookupMetadataResults( metadata ); + } + catch ( JDOObjectNotFoundException e ) + { + results = createMetadataResults( metadata ); + } + + return results; + } + + private MetadataResults createMetadataResults( RepositoryMetadata metadata ) + { + /* The funky StringUtils.defaultString() is used because of database constraints. + * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version. + * This also means that none of those fields may be null. however, that doesn't eliminate the + * ability to have an empty string in place of a null. 
+ */ + + MetadataResults results = new MetadataResults(); + results.setGroupId( StringUtils.defaultString( metadata.getGroupId() ) ); + results.setArtifactId( StringUtils.defaultString( metadata.getArtifactId() ) ); + results.setVersion( StringUtils.defaultString( metadata.getBaseVersion() ) ); + + return results; + } + + private MetadataResults lookupMetadataResults( RepositoryMetadata metadata ) + { + /* The funky StringUtils.defaultString() is used because of database constraints. + * The MetadataResults object has a complex primary key consisting of groupId, artifactId, and version. + * This also means that none of those fields may be null. however, that doesn't eliminate the + * ability to have an empty string in place of a null. + */ + + MetadataResultsKey key = new MetadataResultsKey(); + key.groupId = StringUtils.defaultString( metadata.getGroupId(), "" ); + key.artifactId = StringUtils.defaultString( metadata.getArtifactId(), "" ); + key.version = StringUtils.defaultString( metadata.getBaseVersion(), "" ); + + return (MetadataResults) getObjectByKey( MetadataResults.class, key ); + } + + public int getNumFailures() + { + int count = 0; + for ( Iterator it = getIterator(); it.hasNext(); ) + { + MetadataResults results = (MetadataResults) it.next(); + count += results.getFailures().size(); + } + return count; + } + + public int getNumNotices() + { + int count = 0; + for ( Iterator it = getIterator(); it.hasNext(); ) + { + MetadataResults results = (MetadataResults) it.next(); + count += results.getNotices().size(); + } + return count; + } + + public int getNumWarnings() + { + int count = 0; + for ( Iterator it = getIterator(); it.hasNext(); ) + { + MetadataResults results = (MetadataResults) it.next(); + count += results.getWarnings().size(); + } + return count; + } +} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java index 201ce4ba4..4dd2430a5 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/database/ReportingDatabase.java @@ -19,618 +19,106 @@ package org.apache.maven.archiva.reporting.database; * under the License. */ -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.archiva.reporting.model.ArtifactResults; -import org.apache.maven.archiva.reporting.model.MetadataResults; -import org.apache.maven.archiva.reporting.model.Reporting; -import org.apache.maven.archiva.reporting.model.Result; -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; - -import java.util.Date; -import java.util.HashMap; import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Set; /** + * The Main Reporting Database. 
+ * * @todo i18n, including message formatting and parameterisation + * @plexus.component role="org.apache.maven.archiva.reporting.database.ReportingDatabase" */ public class ReportingDatabase { - private final Reporting reporting; - - private Map artifactMap; - - private Map metadataMap; - - private int numFailures; - - private int numWarnings; - - private ArtifactRepository repository; - - private boolean inProgress; - - private long startTime; - - private final ReportGroup reportGroup; - - private Set metadataWithProblems; - - private Map filteredDatabases = new HashMap(); - - private int numNotices; - - public ReportingDatabase( ReportGroup reportGroup ) - { - this( reportGroup, new Reporting() ); - } - - public ReportingDatabase( ReportGroup reportGroup, Reporting reporting ) - { - this( reportGroup, reporting, null ); - } - - public ReportingDatabase( ReportGroup reportGroup, ArtifactRepository repository ) - { - this( reportGroup, new Reporting(), repository ); - } - - public ReportingDatabase( ReportGroup reportGroup, Reporting reporting, ArtifactRepository repository ) - { - this.reportGroup = reportGroup; - - this.reporting = reporting; - - this.repository = repository; - - initArtifactMap(); - - initMetadataMap(); - } - - public void addFailure( Artifact artifact, String processor, String problem, String reason ) - { - ArtifactResults results = getArtifactResults( artifact ); - Result result = createResult( processor, problem, reason ); - if ( !results.getFailures().contains( result ) ) - { - results.addFailure( result ); - numFailures++; - } - updateTimings(); - - if ( filteredDatabases.containsKey( problem ) ) - { - ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem ); - - reportingDatabase.addFailure( artifact, processor, problem, reason ); - } - } - - public void addNotice( Artifact artifact, String processor, String problem, String reason ) - { - ArtifactResults results = getArtifactResults( artifact ); - Result result = createResult( processor, problem, reason ); - if ( !results.getNotices().contains( result ) ) - { - results.addNotice( result ); - numNotices++; - } - updateTimings(); - - if ( filteredDatabases.containsKey( problem ) ) - { - ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem ); - - reportingDatabase.addNotice( artifact, processor, problem, reason ); - } - } - - public void addWarning( Artifact artifact, String processor, String problem, String reason ) - { - ArtifactResults results = getArtifactResults( artifact ); - Result result = createResult( processor, problem, reason ); - if ( !results.getWarnings().contains( result ) ) - { - results.addWarning( result ); - numWarnings++; - } - updateTimings(); - - if ( filteredDatabases.containsKey( problem ) ) - { - ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem ); - - reportingDatabase.addWarning( artifact, processor, problem, reason ); - } - } - - ArtifactResults getArtifactResults( Artifact artifact ) - { - return getArtifactResults( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), - artifact.getType(), artifact.getClassifier() ); - } - - private ArtifactResults getArtifactResults( String groupId, String artifactId, String version, String type, - String classifier ) - { - Map artifactMap = this.artifactMap; - - String key = getArtifactKey( groupId, artifactId, version, type, classifier ); - ArtifactResults results = (ArtifactResults) artifactMap.get( key ); - if ( results 
== null ) - { - results = new ArtifactResults(); - results.setArtifactId( artifactId ); - results.setClassifier( classifier ); - results.setGroupId( groupId ); - results.setType( type ); - results.setVersion( version ); - - artifactMap.put( key, results ); - reporting.getArtifacts().add( results ); - } - - return results; - } - - private void initArtifactMap() - { - Map map = new HashMap(); - for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); ) - { - ArtifactResults result = (ArtifactResults) i.next(); - - String key = getArtifactKey( result.getGroupId(), result.getArtifactId(), result.getVersion(), - result.getType(), result.getClassifier() ); - map.put( key, result ); - - numFailures += result.getFailures().size(); - numWarnings += result.getWarnings().size(); - numNotices += result.getNotices().size(); - } - artifactMap = map; - } - - private static String getArtifactKey( String groupId, String artifactId, String version, String type, - String classifier ) - { - return groupId + ":" + artifactId + ":" + version + ":" + type + ":" + classifier; - } - - private static Result createResult( String processor, String problem, String reason ) - { - Result result = new Result(); - result.setProcessor( processor ); - result.setProblem( problem ); - result.setReason( reason ); - return result; - } - - public void addFailure( RepositoryMetadata metadata, String processor, String problem, String reason ) - { - MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() ); - if ( !metadataWithProblems.contains( results ) ) - { - metadataWithProblems.add( results ); - } - Result result = createResult( processor, problem, reason ); - if ( !results.getFailures().contains( result ) ) - { - results.addFailure( result ); - numFailures++; - } - updateTimings(); - - if ( filteredDatabases.containsKey( problem ) ) - { - ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem ); - - reportingDatabase.addFailure( metadata, processor, problem, reason ); - } - } - - public void addWarning( RepositoryMetadata metadata, String processor, String problem, String reason ) - { - MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() ); - if ( !metadataWithProblems.contains( results ) ) - { - metadataWithProblems.add( results ); - } - Result result = createResult( processor, problem, reason ); - if ( !results.getWarnings().contains( result ) ) - { - results.addWarning( result ); - numWarnings++; - } - updateTimings(); - - if ( filteredDatabases.containsKey( problem ) ) - { - ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem ); - - reportingDatabase.addWarning( metadata, processor, problem, reason ); - } - } - - public void addNotice( RepositoryMetadata metadata, String processor, String problem, String reason ) - { - MetadataResults results = getMetadataResults( metadata, System.currentTimeMillis() ); - if ( !metadataWithProblems.contains( results ) ) - { - metadataWithProblems.add( results ); - } - Result result = createResult( processor, problem, reason ); - if ( !results.getNotices().contains( result ) ) - { - results.addNotice( result ); - numNotices++; - } - updateTimings(); - - if ( filteredDatabases.containsKey( problem ) ) - { - ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( problem ); - - reportingDatabase.addNotice( metadata, processor, problem, reason ); - } - } - - public Set getMetadataWithProblems() - { - return metadataWithProblems; 
- } - - private void initMetadataMap() - { - Map map = new HashMap(); - Set problems = new LinkedHashSet(); - - for ( Iterator i = reporting.getMetadata().iterator(); i.hasNext(); ) - { - MetadataResults result = (MetadataResults) i.next(); - - String key = getMetadataKey( result.getGroupId(), result.getArtifactId(), result.getVersion() ); - - map.put( key, result ); - - numFailures += result.getFailures().size(); - numWarnings += result.getWarnings().size(); - numNotices += result.getNotices().size(); - - if ( !result.getFailures().isEmpty() || !result.getWarnings().isEmpty() || !result.getNotices().isEmpty() ) - { - problems.add( result ); - } - } - metadataMap = map; - metadataWithProblems = problems; - } + public static final String ROLE = ReportingDatabase.class.getName(); - private static String getMetadataKey( String groupId, String artifactId, String version ) - { - return groupId + ":" + artifactId + ":" + version; - } - - public int getNumFailures() - { - return numFailures; - } - - public int getNumWarnings() - { - return numWarnings; - } + /** + * @plexus.requirement + */ + private ArtifactResultsDatabase artifactDatabase; - public Reporting getReporting() - { - return reporting; - } + /** + * @plexus.requirement + */ + private MetadataResultsDatabase metadataDatabase; public Iterator getArtifactIterator() { - return reporting.getArtifacts().iterator(); + return artifactDatabase.getIterator(); } public Iterator getMetadataIterator() { - return reporting.getMetadata().iterator(); + return metadataDatabase.getIterator(); } - public boolean isMetadataUpToDate( RepositoryMetadata metadata, long timestamp ) + public void clear() { - String key = getMetadataKey( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion() ); - Map map = metadataMap; - MetadataResults results = (MetadataResults) map.get( key ); - return results != null && results.getLastModified() >= timestamp; } /** - * Make sure the metadata record exists, but remove any previous reports in preparation for adding new ones. - * - * @param metadata the metadata - * @param lastModified the modification time of the file being tracked + *
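A rough usage sketch of the refactored facade, assuming an already-initialised Plexus container; the helper class, method and variable names are illustrative, not taken from this commit.

import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.codehaus.plexus.PlexusContainer;

public class ReportingDatabaseUsageSketch
{
    // Illustrative only: the database is now a Plexus component, so callers
    // look it up by role instead of constructing it per repository.
    public int countFailures( PlexusContainer container )
        throws Exception
    {
        ReportingDatabase reportingDatabase =
            (ReportingDatabase) container.lookup( ReportingDatabase.ROLE );

        // Each counter call below walks every stored result record, which is
        // why the javadoc that follows warns to use them sparingly.
        return reportingDatabase.getNumFailures();
    }
}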

+ * Get the number of failures in the database. + *

+ * + *

+ * WARNING: This is a very resource intensive request. Use sparingly. + *

+ * + * @return the number of failures in the database. */ - public void cleanMetadata( RepositoryMetadata metadata, long lastModified ) - { - MetadataResults results = getMetadataResults( metadata, lastModified ); - - results.setLastModified( lastModified ); - - numFailures -= results.getFailures().size(); - results.getFailures().clear(); - - numWarnings -= results.getWarnings().size(); - results.getWarnings().clear(); - - numNotices -= results.getWarnings().size(); - results.getNotices().clear(); - - metadataWithProblems.remove( results ); - } - - MetadataResults getMetadataResults( RepositoryMetadata metadata, long lastModified ) - { - return getMetadataResults( metadata.getGroupId(), metadata.getArtifactId(), metadata.getBaseVersion(), - lastModified ); - } - - private MetadataResults getMetadataResults( String groupId, String artifactId, String baseVersion, - long lastModified ) - { - String key = getMetadataKey( groupId, artifactId, baseVersion ); - Map metadataMap = this.metadataMap; - MetadataResults results = (MetadataResults) metadataMap.get( key ); - if ( results == null ) - { - results = new MetadataResults(); - results.setArtifactId( artifactId ); - results.setGroupId( groupId ); - results.setVersion( baseVersion ); - results.setLastModified( lastModified ); - - metadataMap.put( key, results ); - reporting.getMetadata().add( results ); - } - return results; - } - - public void removeArtifact( Artifact artifact ) - { - Map map = artifactMap; - - String key = getArtifactKey( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), - artifact.getType(), artifact.getClassifier() ); - ArtifactResults results = (ArtifactResults) map.get( key ); - if ( results != null ) - { - for ( Iterator i = reporting.getArtifacts().iterator(); i.hasNext(); ) - { - if ( results.equals( i.next() ) ) - { - i.remove(); - } - } - - numFailures -= results.getFailures().size(); - numWarnings -= results.getWarnings().size(); - numNotices -= results.getNotices().size(); - - map.remove( key ); - } - } - - public ArtifactRepository getRepository() - { - return repository; - } - - public boolean isInProgress() - { - return inProgress; - } - - public void setInProgress( boolean inProgress ) - { - this.inProgress = inProgress; - - if ( inProgress ) - { - startTime = System.currentTimeMillis(); - } - } - - public void clear() - { - // clear the values rather than destroy the instance so that the "inProgress" indicator is in tact. - numWarnings = 0; - numNotices = 0; - numFailures = 0; - - artifactMap.clear(); - metadataMap.clear(); - metadataWithProblems.clear(); - filteredDatabases.clear(); - - reporting.getArtifacts().clear(); - reporting.getMetadata().clear(); - - updateTimings(); - } - - public void setStartTime( long startTime ) - { - this.startTime = startTime; - } - - public long getStartTime() - { - return startTime; - } - - public void updateTimings() - { - long startTime = getStartTime(); - Date endTime = new Date(); - if ( startTime > 0 ) - { - getReporting().setExecutionTime( endTime.getTime() - startTime ); - } - getReporting().setLastModified( endTime.getTime() ); - } - - public ReportGroup getReportGroup() + public int getNumFailures() { - return reportGroup; + int count = 0; + count += artifactDatabase.getNumFailures(); + count += metadataDatabase.getNumFailures(); + return count; } - public ReportingDatabase getFilteredDatabase( String filter ) + /** + *

+ * Get the number of notices in the database. + *

+ * + *

+ * WARNING: This is a very resource intensive request. Use sparingly. + *

+ * + * @return the number of notices in the database. + */ + public int getNumNotices() { - ReportingDatabase reportingDatabase = (ReportingDatabase) filteredDatabases.get( filter ); - - if ( reportingDatabase == null ) - { - reportingDatabase = new ReportingDatabase( reportGroup, repository ); - - Reporting reporting = reportingDatabase.getReporting(); - reporting.setExecutionTime( this.reporting.getExecutionTime() ); - reporting.setLastModified( this.reporting.getLastModified() ); - - for ( Iterator i = this.reporting.getArtifacts().iterator(); i.hasNext(); ) - { - ArtifactResults results = (ArtifactResults) i.next(); - ArtifactResults targetResults = null; - for ( Iterator j = results.getFailures().iterator(); j.hasNext(); ) - { - Result result = (Result) j.next(); - - if ( filter.equals( result.getProcessor() ) ) - { - if ( targetResults == null ) - { - // lazily create so it is not added unless it has to be - targetResults = createArtifactResults( reportingDatabase, results ); - } - - targetResults.addFailure( result ); - reportingDatabase.numFailures++; - } - } - for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); ) - { - Result result = (Result) j.next(); - - if ( filter.equals( result.getProcessor() ) ) - { - if ( targetResults == null ) - { - // lazily create so it is not added unless it has to be - targetResults = createArtifactResults( reportingDatabase, results ); - } - - targetResults.addWarning( result ); - reportingDatabase.numWarnings++; - } - } - for ( Iterator j = results.getNotices().iterator(); j.hasNext(); ) - { - Result result = (Result) j.next(); - - if ( filter.equals( result.getProcessor() ) ) - { - if ( targetResults == null ) - { - // lazily create so it is not added unless it has to be - targetResults = createArtifactResults( reportingDatabase, results ); - } - - targetResults.addNotice( result ); - reportingDatabase.numNotices++; - } - } - } - for ( Iterator i = this.reporting.getMetadata().iterator(); i.hasNext(); ) - { - MetadataResults results = (MetadataResults) i.next(); - MetadataResults targetResults = null; - for ( Iterator j = results.getFailures().iterator(); j.hasNext(); ) - { - Result result = (Result) j.next(); - - if ( filter.equals( result.getProcessor() ) ) - { - if ( targetResults == null ) - { - // lazily create so it is not added unless it has to be - targetResults = createMetadataResults( reportingDatabase, results ); - } - - targetResults.addFailure( result ); - reportingDatabase.numFailures++; - } - } - for ( Iterator j = results.getWarnings().iterator(); j.hasNext(); ) - { - Result result = (Result) j.next(); - - if ( filter.equals( result.getProcessor() ) ) - { - if ( targetResults == null ) - { - // lazily create so it is not added unless it has to be - targetResults = createMetadataResults( reportingDatabase, results ); - } - - targetResults.addWarning( result ); - reportingDatabase.numWarnings++; - } - } - for ( Iterator j = results.getNotices().iterator(); j.hasNext(); ) - { - Result result = (Result) j.next(); - - if ( filter.equals( result.getProcessor() ) ) - { - if ( targetResults == null ) - { - // lazily create so it is not added unless it has to be - targetResults = createMetadataResults( reportingDatabase, results ); - } - - targetResults.addNotice( result ); - reportingDatabase.numNotices++; - } - } - } - - filteredDatabases.put( filter, reportingDatabase ); - } - - return reportingDatabase; + int count = 0; + count += artifactDatabase.getNumNotices(); + count += metadataDatabase.getNumNotices(); + return 
count; } - private static MetadataResults createMetadataResults( ReportingDatabase reportingDatabase, MetadataResults results ) + /** + *

+ * Get the number of warnings in the database. + *

+ * + *

+ * WARNING: This is a very resource intensive request. Use sparingly. + *

+ * + * @return the number of warnings in the database. + */ + public int getNumWarnings() { - MetadataResults targetResults = reportingDatabase.getMetadataResults( results.getGroupId(), - results.getArtifactId(), - results.getVersion(), - results.getLastModified() ); - reportingDatabase.metadataWithProblems.add( targetResults ); - return targetResults; + int count = 0; + count += artifactDatabase.getNumWarnings(); + count += metadataDatabase.getNumWarnings(); + return count; } - private static ArtifactResults createArtifactResults( ReportingDatabase reportingDatabase, ArtifactResults results ) + public ArtifactResultsDatabase getArtifactDatabase() { - return reportingDatabase.getArtifactResults( results.getGroupId(), results.getArtifactId(), - results.getVersion(), results.getType(), results.getClassifier() ); + return artifactDatabase; } - public int getNumNotices() + public MetadataResultsDatabase getMetadataDatabase() { - return numNotices; + return metadataDatabase; } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java deleted file mode 100644 index 8bc4b9dd9..000000000 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/DefaultReportExecutor.java +++ /dev/null @@ -1,247 +0,0 @@ -package org.apache.maven.archiva.reporting.executor; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
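Before the removed executor below, a brief sketch of how a caller might walk the stored results through the new facade's iterators; the loop and class name are illustrative only.

import java.util.Iterator;

import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.archiva.reporting.model.ArtifactResults;

public class ResultsIterationSketch
{
    // Illustrative only: list per-artifact failure counts via the facade.
    public void printFailureCounts( ReportingDatabase reportingDatabase )
    {
        for ( Iterator it = reportingDatabase.getArtifactIterator(); it.hasNext(); )
        {
            ArtifactResults results = (ArtifactResults) it.next();
            System.out.println( results.getGroupId() + ":" + results.getArtifactId() + ":"
                + results.getVersion() + " failures=" + results.getFailures().size() );
        }
    }
}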
- */ - -import org.apache.maven.archiva.discoverer.ArtifactDiscoverer; -import org.apache.maven.archiva.discoverer.DiscovererException; -import org.apache.maven.archiva.discoverer.MetadataDiscoverer; -import org.apache.maven.archiva.discoverer.filter.AcceptAllMetadataFilter; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.archiva.reporting.store.ReportingStore; -import org.apache.maven.archiva.reporting.store.ReportingStoreException; -import org.apache.maven.artifact.Artifact; -import org.apache.maven.artifact.InvalidArtifactRTException; -import org.apache.maven.artifact.factory.ArtifactFactory; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; -import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout; -import org.apache.maven.artifact.repository.layout.LegacyRepositoryLayout; -import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; -import org.apache.maven.artifact.resolver.filter.ArtifactFilter; -import org.apache.maven.model.Model; -import org.apache.maven.project.MavenProject; -import org.apache.maven.project.MavenProjectBuilder; -import org.apache.maven.project.ProjectBuildingException; -import org.codehaus.plexus.logging.AbstractLogEnabled; - -import java.io.File; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -/** - * Report executor implementation. - * - * @todo should the report set be limitable by configuration? - * @plexus.component - */ -public class DefaultReportExecutor - extends AbstractLogEnabled - implements ReportExecutor -{ - /** - * @plexus.requirement - */ - private MavenProjectBuilder projectBuilder; - - /** - * @plexus.requirement - */ - private ReportingStore reportingStore; - - /** - * @plexus.requirement - */ - private ArtifactFactory artifactFactory; - - /** - * @plexus.requirement role="org.apache.maven.archiva.discoverer.ArtifactDiscoverer" - */ - private Map artifactDiscoverers; - - /** - * @plexus.requirement role="org.apache.maven.archiva.discoverer.MetadataDiscoverer" - */ - private Map metadataDiscoverers; - - private static final int ARTIFACT_BUFFER_SIZE = 1000; - - public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository ) - throws ReportingStoreException - { - ReportingDatabase reporter = getReportDatabase( repository, reportGroup ); - - for ( Iterator i = metadata.iterator(); i.hasNext(); ) - { - RepositoryMetadata repositoryMetadata = (RepositoryMetadata) i.next(); - - File file = - new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) ); - reporter.cleanMetadata( repositoryMetadata, file.lastModified() ); - - reportGroup.processMetadata( repositoryMetadata, repository, reporter ); - } - - reportingStore.storeReports( reporter, repository ); - } - - public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository ) - throws ReportingStoreException - { - ReportingDatabase reporter = getReportDatabase( repository, reportGroup ); - - for ( Iterator i = artifacts.iterator(); i.hasNext(); ) - { - Artifact artifact = (Artifact) i.next(); - - Model model = null; - try - { - Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(), - artifact.getArtifactId(), - artifact.getVersion() ); - MavenProject project = - 
projectBuilder.buildFromRepository( pomArtifact, Collections.EMPTY_LIST, repository ); - - model = project.getModel(); - } - catch ( InvalidArtifactRTException e ) - { - reporter.addWarning( artifact, null, null, "Invalid artifact [" + artifact + "] : " + e ); - } - catch ( ProjectBuildingException e ) - { - reporter.addWarning( artifact, null, null, "Error reading project model: " + e ); - } - - reporter.removeArtifact( artifact ); - - reportGroup.processArtifact( artifact, model, reporter ); - } - - reportingStore.storeReports( reporter, repository ); - } - - public ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup ) - throws ReportingStoreException - { - getLogger().debug( - "Reading previous report database " + reportGroup.getName() + " from repository " + repository.getId() ); - return reportingStore.getReportsFromStore( repository, reportGroup ); - } - - public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns, - ArtifactFilter filter ) - throws DiscovererException, ReportingStoreException - { - // Flush (as in toilet, not store) the report database - ReportingDatabase database = getReportDatabase( repository, reportGroup ); - database.clear(); - - // Discovery process - String layoutProperty = getRepositoryLayout( repository.getLayout() ); - ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty ); - - // Save some memory by not tracking paths we won't use - // TODO: Plexus CDC should be able to inject this configuration - discoverer.setTrackOmittedPaths( false ); - - List artifacts = discoverer.discoverArtifacts( repository, blacklistedPatterns, filter ); - - if ( !artifacts.isEmpty() ) - { - getLogger().info( "Discovered " + artifacts.size() + " artifacts" ); - - // Work through these in batches, then flush the project cache. - for ( int j = 0; j < artifacts.size(); j += ARTIFACT_BUFFER_SIZE ) - { - int end = j + ARTIFACT_BUFFER_SIZE; - List currentArtifacts = artifacts.subList( j, end > artifacts.size() ? artifacts.size() : end ); - - // TODO: proper queueing of this in case it was triggered externally (not harmful to do so at present, but not optimal) - - // run the reports. - runArtifactReports( reportGroup, currentArtifacts, repository ); - - // MNG-142 - the project builder retains a lot of objects in its inflexible cache. This is a hack - // around that. TODO: remove when it is configurable - flushProjectBuilderCacheHack(); - } - } - - MetadataDiscoverer metadataDiscoverer = (MetadataDiscoverer) metadataDiscoverers.get( layoutProperty ); - List metadata = - metadataDiscoverer.discoverMetadata( repository, blacklistedPatterns, new AcceptAllMetadataFilter() ); - - if ( !metadata.isEmpty() ) - { - getLogger().info( "Discovered " + metadata.size() + " metadata files" ); - - // run the reports - runMetadataReports( reportGroup, metadata, repository ); - } - } - - private String getRepositoryLayout( ArtifactRepositoryLayout layout ) - { - // gross limitation that there is no reverse lookup of the hint for the layout. 
- if ( layout.getClass().equals( DefaultRepositoryLayout.class ) ) - { - return "default"; - } - else if ( layout.getClass().equals( LegacyRepositoryLayout.class ) ) - { - return "legacy"; - } - else - { - throw new IllegalArgumentException( "Unknown layout: " + layout ); - } - } - - private void flushProjectBuilderCacheHack() - { - try - { - if ( projectBuilder != null ) - { - java.lang.reflect.Field f = projectBuilder.getClass().getDeclaredField( "rawProjectCache" ); - f.setAccessible( true ); - Map cache = (Map) f.get( projectBuilder ); - cache.clear(); - - f = projectBuilder.getClass().getDeclaredField( "processedProjectCache" ); - f.setAccessible( true ); - cache = (Map) f.get( projectBuilder ); - cache.clear(); - } - } - catch ( NoSuchFieldException e ) - { - throw new RuntimeException( e ); - } - catch ( IllegalAccessException e ) - { - throw new RuntimeException( e ); - } - } -} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java deleted file mode 100644 index d6f7b5d03..000000000 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/executor/ReportExecutor.java +++ /dev/null @@ -1,89 +0,0 @@ -package org.apache.maven.archiva.reporting.executor; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.archiva.discoverer.DiscovererException; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.archiva.reporting.store.ReportingStoreException; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.resolver.filter.ArtifactFilter; - -import java.util.List; - -/** - * Executes a report or report group. - */ -public interface ReportExecutor -{ - /** - * Plexus component role name. - */ - String ROLE = ReportExecutor.class.getName(); - - /** - * Run reports on a set of metadata. - * - * @param reportGroup the report set to run - * @param metadata the RepositoryMetadata objects to report on - * @param repository the repository that they come from - * @throws org.apache.maven.archiva.reporting.store.ReportingStoreException - * if there is a problem reading/writing the report database - */ - public void runMetadataReports( ReportGroup reportGroup, List metadata, ArtifactRepository repository ) - throws ReportingStoreException; - - /** - * Run reports on a set of artifacts. 
- * - * @param reportGroup the report set to run - * @param artifacts the Artifact objects to report on - * @param repository the repository that they come from - * @throws ReportingStoreException if there is a problem reading/writing the report database - */ - public void runArtifactReports( ReportGroup reportGroup, List artifacts, ArtifactRepository repository ) - throws ReportingStoreException; - - /** - * Get the report database in use for a given repository. - * - * @param repository the repository - * @param reportGroup the report set to run - * @return the report database - * @throws ReportingStoreException if there is a problem reading the report database - */ - ReportingDatabase getReportDatabase( ArtifactRepository repository, ReportGroup reportGroup ) - throws ReportingStoreException; - - /** - * Run the artifact and metadata reports for the repository. The artifacts and metadata will be discovered. - * - * @param repository the repository to run from - * @param blacklistedPatterns the patterns to exclude during discovery - * @param filter the filter to use during discovery to get a consistent list of artifacts - * @param reportGroup the report set to run - * @throws ReportingStoreException if there is a problem reading/writing the report database - * @throws org.apache.maven.archiva.discoverer.DiscovererException - * if there is a problem finding the artifacts and metadata to report on - */ - public void runReports( ReportGroup reportGroup, ArtifactRepository repository, List blacklistedPatterns, - ArtifactFilter filter ) - throws DiscovererException, ReportingStoreException; -} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java deleted file mode 100644 index f1158ad11..000000000 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/filter/ReportingMetadataFilter.java +++ /dev/null @@ -1,43 +0,0 @@ -package org.apache.maven.archiva.reporting.filter; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.archiva.discoverer.filter.MetadataFilter; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; - -/** - * Implementation of a reporting filter. Artifacts already in the database are ignored. 
- */ -public class ReportingMetadataFilter - implements MetadataFilter -{ - private final ReportingDatabase reporter; - - public ReportingMetadataFilter( ReportingDatabase reporter ) - { - this.reporter = reporter; - } - - public boolean include( RepositoryMetadata metadata, long timestamp ) - { - return !reporter.isMetadataUpToDate( metadata, timestamp ); - } -} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java index 2051f7d4b..4e5acdddb 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/AbstractReportGroup.java @@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.group; * under the License. */ -import org.apache.maven.archiva.reporting.database.ReportingDatabase; import org.apache.maven.archiva.reporting.processor.ArtifactReportProcessor; import org.apache.maven.archiva.reporting.processor.MetadataReportProcessor; import org.apache.maven.artifact.Artifact; @@ -46,7 +45,7 @@ public abstract class AbstractReportGroup */ private Map metadataReports; - public void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase ) + public void processArtifact( Artifact artifact, Model model ) { for ( Iterator i = artifactReports.entrySet().iterator(); i.hasNext(); ) { @@ -56,13 +55,12 @@ public abstract class AbstractReportGroup { ArtifactReportProcessor report = (ArtifactReportProcessor) entry.getValue(); - report.processArtifact( artifact, model, reportingDatabase ); + report.processArtifact( artifact, model ); } } } - public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository, - ReportingDatabase reportingDatabase ) + public void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository ) { for ( Iterator i = metadataReports.entrySet().iterator(); i.hasNext(); ) { @@ -72,7 +70,7 @@ public abstract class AbstractReportGroup { MetadataReportProcessor report = (MetadataReportProcessor) entry.getValue(); - report.processMetadata( repositoryMetadata, repository, reportingDatabase ); + report.processMetadata( repositoryMetadata, repository ); } } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java index df26343ac..58b8e2f63 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/DefaultReportGroup.java @@ -25,7 +25,8 @@ import java.util.Map; /** * The default report set, for repository health. * - * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" role-hint="health" + * @plexus.component role="org.apache.maven.archiva.reporting.group.ReportGroup" + * role-hint="health" * @todo could these report groups be assembled dynamically by configuration rather than as explicit components? 
eg, reportGroup.addReport( ARP ), reportGroup.addReport( MRP ) */ public class DefaultReportGroup @@ -62,9 +63,4 @@ public class DefaultReportGroup { return "Repository Health"; } - - public String getFilename() - { - return "health-report.xml"; - } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java index 30fe412e8..fa34b2233 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/group/ReportGroup.java @@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.group; * under the License. */ -import org.apache.maven.archiva.reporting.database.ReportingDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; @@ -43,19 +42,16 @@ public interface ReportGroup * * @param artifact the artifact to process * @param model the POM associated with the artifact to process - * @param reportingDatabase the report database to store results in */ - void processArtifact( Artifact artifact, Model model, ReportingDatabase reportingDatabase ); + void processArtifact( Artifact artifact, Model model ); /** * Run any metadata related reports in the report set. * * @param repositoryMetadata the metadata to process * @param repository the repository the metadata is located in - * @param reportingDatabase the report database to store results in */ - void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository, - ReportingDatabase reportingDatabase ); + void processMetadata( RepositoryMetadata repositoryMetadata, ArtifactRepository repository ); /** * Whether a report with the given role hint is included in this report set. @@ -79,11 +75,4 @@ public interface ReportGroup * @return the report name */ String getName(); - - /** - * Get the filename of the reports within the repository's reports directory. - * - * @return the filename - */ - String getFilename(); } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java new file mode 100644 index 000000000..349447930 --- /dev/null +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/ArtifactResultsKey.java @@ -0,0 +1,158 @@ +package org.apache.maven.archiva.reporting.model; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.commons.lang.StringUtils; + +import java.io.Serializable; + +/** + * ArtifactResultsKey - used by jpox for application identity for the {@link ArtifactResults} object and table. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class ArtifactResultsKey + implements Serializable +{ + public String groupId = ""; + + public String artifactId = ""; + + public String version = ""; + + public String type = ""; + + public String classifier = ""; + + public ArtifactResultsKey() + { + /* do nothing */ + } + + public ArtifactResultsKey( String key ) + { + String parts[] = StringUtils.splitPreserveAllTokens( key, ':' ); + groupId = parts[0]; + artifactId = parts[1]; + version = parts[2]; + type = parts[3]; + classifier = parts[4]; + } + + public String toString() + { + return StringUtils.join( new String[] { groupId, artifactId, version, type, classifier }, ':' ); + } + + public int hashCode() + { + final int PRIME = 31; + int result = 1; + result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() ); + result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() ); + result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() ); + result = PRIME * result + ( ( type == null ) ? 0 : type.hashCode() ); + result = PRIME * result + ( ( classifier == null ) ? 0 : classifier.hashCode() ); + return result; + } + + public boolean equals( Object obj ) + { + if ( this == obj ) + { + return true; + } + + if ( obj == null ) + { + return false; + } + + if ( getClass() != obj.getClass() ) + { + return false; + } + + final ArtifactResultsKey other = (ArtifactResultsKey) obj; + + if ( groupId == null ) + { + if ( other.groupId != null ) + { + return false; + } + } + else if ( !groupId.equals( other.groupId ) ) + { + return false; + } + + if ( artifactId == null ) + { + if ( other.artifactId != null ) + { + return false; + } + } + else if ( !artifactId.equals( other.artifactId ) ) + { + return false; + } + + if ( version == null ) + { + if ( other.version != null ) + { + return false; + } + } + else if ( !version.equals( other.version ) ) + { + return false; + } + + if ( type == null ) + { + if ( other.type != null ) + { + return false; + } + } + else if ( !type.equals( other.type ) ) + { + return false; + } + + if ( classifier == null ) + { + if ( other.classifier != null ) + { + return false; + } + } + else if ( !classifier.equals( other.classifier ) ) + { + return false; + } + + return true; + } +} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java new file mode 100644 index 000000000..aeaff3bde --- /dev/null +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/model/MetadataResultsKey.java @@ -0,0 +1,126 @@ +package org.apache.maven.archiva.reporting.model; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
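A small round-trip sketch for the application-identity key above, assuming made-up coordinates: the key string uses the colon-separated groupId:artifactId:version:type:classifier layout that splitPreserveAllTokens() parses, and an absent classifier stays an empty string.

import org.apache.maven.archiva.reporting.model.ArtifactResultsKey;

public class ArtifactResultsKeyRoundTrip
{
    public static void main( String[] args )
    {
        // The trailing ':' keeps the classifier token as "" instead of dropping it.
        ArtifactResultsKey key = new ArtifactResultsKey( "org.example:example-artifact:1.0:jar:" );

        System.out.println( key );                                                     // org.example:example-artifact:1.0:jar:
        System.out.println( key.equals( new ArtifactResultsKey( key.toString() ) ) );  // true
    }
}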
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.commons.lang.StringUtils; + +import java.io.Serializable; + +/** + * MetadataResultsKey - used by jpox for application identity for the {@link MetadataResults} object and table. + * + * @author Joakim Erdfelt + * @version $Id$ + */ +public class MetadataResultsKey + implements Serializable +{ + public String groupId = ""; + + public String artifactId = ""; + + public String version = ""; + + public MetadataResultsKey() + { + /* do nothing */ + } + + public MetadataResultsKey( String key ) + { + String parts[] = StringUtils.splitPreserveAllTokens( key, ':' ); + groupId = parts[0]; + artifactId = parts[1]; + version = parts[2]; + } + + public String toString() + { + return StringUtils.join( new String[] { groupId, artifactId, version }, ':' ); + } + + public int hashCode() + { + final int PRIME = 31; + int result = 1; + result = PRIME * result + ( ( groupId == null ) ? 0 : groupId.hashCode() ); + result = PRIME * result + ( ( artifactId == null ) ? 0 : artifactId.hashCode() ); + result = PRIME * result + ( ( version == null ) ? 0 : version.hashCode() ); + return result; + } + + public boolean equals( Object obj ) + { + if ( this == obj ) + { + return true; + } + + if ( obj == null ) + { + return false; + } + + if ( getClass() != obj.getClass() ) + { + return false; + } + + final MetadataResultsKey other = (MetadataResultsKey) obj; + + if ( groupId == null ) + { + if ( other.groupId != null ) + { + return false; + } + } + else if ( !groupId.equals( other.groupId ) ) + { + return false; + } + + if ( artifactId == null ) + { + if ( other.artifactId != null ) + { + return false; + } + } + else if ( !artifactId.equals( other.artifactId ) ) + { + return false; + } + + if ( version == null ) + { + if ( other.version != null ) + { + return false; + } + } + else if ( !version.equals( other.version ) ) + { + return false; + } + + return true; + } +} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java index 79b7d53f0..52e63c5e7 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ArtifactReportProcessor.java @@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.processor; * under the License.
*/ -import org.apache.maven.archiva.reporting.database.ReportingDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.model.Model; @@ -31,5 +30,5 @@ public interface ArtifactReportProcessor { String ROLE = ArtifactReportProcessor.class.getName(); - void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter ); + void processArtifact( Artifact artifact, Model model ); } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java index 7660dfcb0..04a31af7e 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/BadMetadataReportProcessor.java @@ -22,7 +22,7 @@ package org.apache.maven.archiva.reporting.processor; import org.apache.commons.lang.StringUtils; import org.apache.maven.archiva.layer.RepositoryQueryLayer; import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.factory.ArtifactFactory; import org.apache.maven.artifact.repository.ArtifactRepository; @@ -61,6 +61,11 @@ public class BadMetadataReportProcessor */ private RepositoryQueryLayerFactory repositoryQueryLayerFactory; + /** + * @plexus.requirement + */ + private MetadataResultsDatabase database; + private static final String ROLE_HINT = "bad-metadata"; /** @@ -70,18 +75,17 @@ public class BadMetadataReportProcessor * @param repository the repository where the metadata was encountered * @param reporter the ReportingDatabase to receive processing results */ - public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, - ReportingDatabase reporter ) + public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository ) { if ( metadata.storedInGroupDirectory() ) { try { - checkPluginMetadata( metadata, repository, reporter ); + checkPluginMetadata( metadata, repository ); } catch ( IOException e ) { - addWarning( reporter, metadata, null, "Error getting plugin artifact directories versions: " + e ); + addWarning( metadata, null, "Error getting plugin artifact directories versions: " + e ); } } else @@ -98,36 +102,34 @@ public class BadMetadataReportProcessor } if ( !found ) { - addFailure( reporter, metadata, "missing-last-updated", - "Missing lastUpdated element inside the metadata." ); + addFailure( metadata, "missing-last-updated", "Missing lastUpdated element inside the metadata." 
); } if ( metadata.storedInArtifactVersionDirectory() ) { - checkSnapshotMetadata( metadata, repository, reporter ); + checkSnapshotMetadata( metadata, repository ); } else { - checkMetadataVersions( metadata, repository, reporter ); + checkMetadataVersions( metadata, repository ); try { - checkRepositoryVersions( metadata, repository, reporter ); + checkRepositoryVersions( metadata, repository ); } catch ( IOException e ) { String reason = "Error getting plugin artifact directories versions: " + e; - addWarning( reporter, metadata, null, reason ); + addWarning( metadata, null, reason ); } } } } - private static void addWarning( ReportingDatabase reporter, RepositoryMetadata metadata, String problem, - String reason ) + private void addWarning( RepositoryMetadata metadata, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addWarning( metadata, ROLE_HINT, problem, reason ); + database.addWarning( metadata, ROLE_HINT, problem, reason ); } /** @@ -137,12 +139,11 @@ public class BadMetadataReportProcessor * @param repository the repository where the metadata was encountered * @param reporter the ReportingDatabase to receive processing results */ - private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository, - ReportingDatabase reporter ) + private void checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository ) throws IOException { - File metadataDir = - new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile(); + File metadataDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ) + .getParentFile(); List pluginDirs = getArtifactIdFiles( metadataDir ); Map prefixes = new HashMap(); @@ -153,22 +154,22 @@ public class BadMetadataReportProcessor String artifactId = plugin.getArtifactId(); if ( artifactId == null || artifactId.length() == 0 ) { - addFailure( reporter, metadata, "missing-artifact-id:" + plugin.getPrefix(), + addFailure( metadata, "missing-artifact-id:" + plugin.getPrefix(), "Missing or empty artifactId in group metadata for plugin " + plugin.getPrefix() ); } String prefix = plugin.getPrefix(); if ( prefix == null || prefix.length() == 0 ) { - addFailure( reporter, metadata, "missing-plugin-prefix:" + artifactId, + addFailure( metadata, "missing-plugin-prefix:" + artifactId, "Missing or empty plugin prefix for artifactId " + artifactId + "." ); } else { if ( prefixes.containsKey( prefix ) ) { - addFailure( reporter, metadata, "duplicate-plugin-prefix:" + prefix, - "Duplicate plugin prefix found: " + prefix + "." ); + addFailure( metadata, "duplicate-plugin-prefix:" + prefix, "Duplicate plugin prefix found: " + + prefix + "." 
); } else { @@ -181,8 +182,8 @@ public class BadMetadataReportProcessor File pluginDir = new File( metadataDir, artifactId ); if ( !pluginDirs.contains( pluginDir ) ) { - addFailure( reporter, metadata, "missing-plugin-from-repository:" + artifactId, - "Metadata plugin " + artifactId + " not found in the repository" ); + addFailure( metadata, "missing-plugin-from-repository:" + artifactId, "Metadata plugin " + + artifactId + " not found in the repository" ); } else { @@ -196,8 +197,8 @@ public class BadMetadataReportProcessor for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); ) { File plugin = (File) plugins.next(); - addFailure( reporter, metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " + - plugin.getName() + " is present in the repository but " + "missing in the metadata." ); + addFailure( metadata, "missing-plugin-from-metadata:" + plugin.getName(), "Plugin " + plugin.getName() + + " is present in the repository but " + "missing in the metadata." ); } } } @@ -209,27 +210,26 @@ public class BadMetadataReportProcessor * @param repository the repository where the metadata was encountered * @param reporter the ReportingDatabase to receive processing results */ - private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository, - ReportingDatabase reporter ) + private void checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository ) { - RepositoryQueryLayer repositoryQueryLayer = - repositoryQueryLayerFactory.createRepositoryQueryLayer( repository ); + RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository ); Versioning versioning = metadata.getMetadata().getVersioning(); if ( versioning != null ) { Snapshot snapshot = versioning.getSnapshot(); - String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION, - snapshot.getTimestamp() + "-" + snapshot.getBuildNumber() ); - Artifact artifact = - artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version ); + String version = StringUtils.replace( metadata.getBaseVersion(), Artifact.SNAPSHOT_VERSION, snapshot + .getTimestamp() + + "-" + snapshot.getBuildNumber() ); + Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), + version ); artifact.isSnapshot(); // trigger baseVersion correction if ( !repositoryQueryLayer.containsArtifact( artifact ) ) { - addFailure( reporter, metadata, "missing-snapshot-artifact-from-repository:" + version, - "Snapshot artifact " + version + " does not exist." ); + addFailure( metadata, "missing-snapshot-artifact-from-repository:" + version, "Snapshot artifact " + + version + " does not exist." 
); } } } @@ -241,11 +241,9 @@ public class BadMetadataReportProcessor * @param repository the repository where the metadata was encountered * @param reporter the ReportingDatabase to receive processing results */ - private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository, - ReportingDatabase reporter ) + private void checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository ) { - RepositoryQueryLayer repositoryQueryLayer = - repositoryQueryLayerFactory.createRepositoryQueryLayer( repository ); + RepositoryQueryLayer repositoryQueryLayer = repositoryQueryLayerFactory.createRepositoryQueryLayer( repository ); Versioning versioning = metadata.getMetadata().getVersioning(); if ( versioning != null ) @@ -254,13 +252,13 @@ public class BadMetadataReportProcessor { String version = (String) versions.next(); - Artifact artifact = - artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata.getArtifactId(), version ); + Artifact artifact = artifactFactory.createProjectArtifact( metadata.getGroupId(), metadata + .getArtifactId(), version ); if ( !repositoryQueryLayer.containsArtifact( artifact ) ) { - addFailure( reporter, metadata, "missing-artifact-from-repository:" + version, "Artifact version " + - version + " is present in metadata but " + "missing in the repository." ); + addFailure( metadata, "missing-artifact-from-repository:" + version, "Artifact version " + version + + " is present in metadata but " + "missing in the repository." ); } } } @@ -275,14 +273,13 @@ public class BadMetadataReportProcessor * @param reporter the ReportingDatabase to receive processing results * @throws java.io.IOException if there is a problem reading from the file system */ - private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository, - ReportingDatabase reporter ) + private void checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository ) throws IOException { Versioning versioning = metadata.getMetadata().getVersioning(); List metadataVersions = versioning != null ? versioning.getVersions() : Collections.EMPTY_LIST; - File versionsDir = - new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile(); + File versionsDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ) + .getParentFile(); // TODO: I don't know how this condition can happen, but it was seen on the main repository. // Avoid hard failure @@ -295,14 +292,14 @@ public class BadMetadataReportProcessor String version = path.getParentFile().getName(); if ( !metadataVersions.contains( version ) ) { - addFailure( reporter, metadata, "missing-artifact-from-metadata:" + version, "Artifact version " + - version + " found in the repository but " + "missing in the metadata." ); + addFailure( metadata, "missing-artifact-from-metadata:" + version, "Artifact version " + version + + " found in the repository but " + "missing in the metadata." 
); } } } else { - addFailure( reporter, metadata, null, "Metadata's directory did not exist: " + versionsDir ); + addFailure( metadata, null, "Metadata's directory did not exist: " + versionsDir ); } } @@ -339,10 +336,9 @@ public class BadMetadataReportProcessor return artifactIdFiles; } - private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem, - String reason ) + private void addFailure( RepositoryMetadata metadata, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addFailure( metadata, ROLE_HINT, problem, reason ); + database.addFailure( metadata, ROLE_HINT, problem, reason ); } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java index 36b60cc2e..8bd5e141f 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumArtifactReportProcessor.java @@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor; */ import org.apache.commons.io.FileUtils; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.model.Model; @@ -48,10 +48,15 @@ public class ChecksumArtifactReportProcessor * @plexus.requirement role-hint="md5" */ private Digester md5Digester; + + /** + * @plexus.requirement + */ + private ArtifactResultsDatabase database; private static final String ROLE_HINT = "checksum"; - public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter ) + public void processArtifact( Artifact artifact, Model model ) { ArtifactRepository repository = artifact.getRepository(); @@ -68,11 +73,11 @@ public class ChecksumArtifactReportProcessor // TODO: make md5 configurable // verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact ); - verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact ); + verifyChecksum( repository, path + ".sha1", file, sha1Digester, artifact ); } private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester, - ReportingDatabase reporter, Artifact artifact ) + Artifact artifact ) { File checksumFile = new File( repository.getBasedir(), path ); if ( checksumFile.exists() ) @@ -83,23 +88,23 @@ public class ChecksumArtifactReportProcessor } catch ( DigesterException e ) { - addFailure( reporter, artifact, "checksum-wrong", e.getMessage() ); + addFailure( artifact, "checksum-wrong", e.getMessage() ); } catch ( IOException e ) { - addFailure( reporter, artifact, "checksum-io-exception", "Read file error: " + e.getMessage() ); + addFailure( artifact, "checksum-io-exception", "Read file error: " + e.getMessage() ); } } else { - addFailure( reporter, artifact, "checksum-missing", + addFailure( artifact, "checksum-missing", digester.getAlgorithm() + " checksum file does not exist." 
); } } - private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason ) + private void addFailure( Artifact artifact, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addFailure( artifact, ROLE_HINT, problem, reason ); + database.addFailure( artifact, ROLE_HINT, problem, reason ); } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java index 1f0e860a7..84313dc52 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/ChecksumMetadataReportProcessor.java @@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor; */ import org.apache.commons.io.FileUtils; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.MetadataResultsDatabase; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; import org.codehaus.plexus.digest.Digester; @@ -48,32 +48,36 @@ public class ChecksumMetadataReportProcessor */ private Digester md5Digester; + /** + * @plexus.requirement + */ + private MetadataResultsDatabase database; + private static final String ROLE_HINT = "checksum-metadata"; /** * Validate the checksums of the metadata. Get the metadata file from the * repository then validate the checksum. */ - public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, - ReportingDatabase reporter ) + public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository ) { if ( !"file".equals( repository.getProtocol() ) ) { // We can't check other types of URLs yet. Need to use Wagon, with an exists() method. - throw new UnsupportedOperationException( - "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" ); + throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl() + + "'. 
Only file based repositories are supported" ); } //check if checksum files exist String path = repository.pathOfRemoteRepositoryMetadata( metadata ); File file = new File( repository.getBasedir(), path ); - verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata ); - verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata ); + verifyChecksum( repository, path + ".md5", file, md5Digester, metadata ); + verifyChecksum( repository, path + ".sha1", file, sha1Digester, metadata ); } private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester, - ReportingDatabase reporter, RepositoryMetadata metadata ) + RepositoryMetadata metadata ) { File checksumFile = new File( repository.getBasedir(), path ); if ( checksumFile.exists() ) @@ -84,25 +88,23 @@ public class ChecksumMetadataReportProcessor } catch ( DigesterException e ) { - addFailure( reporter, metadata, "checksum-wrong", e.getMessage() ); + addFailure( metadata, "checksum-wrong", e.getMessage() ); } catch ( IOException e ) { - addFailure( reporter, metadata, "checksum-io-exception", "Read file error: " + e.getMessage() ); + addFailure( metadata, "checksum-io-exception", "Read file error: " + e.getMessage() ); } } else { - addFailure( reporter, metadata, "checksum-missing", - digester.getAlgorithm() + " checksum file does not exist." ); + addFailure( metadata, "checksum-missing", digester.getAlgorithm() + " checksum file does not exist." ); } } - private static void addFailure( ReportingDatabase reporter, RepositoryMetadata metadata, String problem, - String reason ) + private void addFailure( RepositoryMetadata metadata, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addFailure( metadata, ROLE_HINT, problem, reason ); + database.addFailure( metadata, ROLE_HINT, problem, reason ); } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java index 2d5bf80b4..049767c2d 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DependencyArtifactReportProcessor.java @@ -21,7 +21,7 @@ package org.apache.maven.archiva.reporting.processor; import org.apache.maven.archiva.layer.RepositoryQueryLayer; import org.apache.maven.archiva.layer.RepositoryQueryLayerFactory; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.factory.ArtifactFactory; import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException; @@ -49,34 +49,39 @@ public class DependencyArtifactReportProcessor */ private RepositoryQueryLayerFactory layerFactory; + /** + * @plexus.requirement + */ + private ArtifactResultsDatabase database; + private static final String POM = "pom"; private static final String ROLE_HINT = "dependency"; - public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter ) + public void processArtifact( Artifact artifact, Model model ) { RepositoryQueryLayer queryLayer = 
layerFactory.createRepositoryQueryLayer( artifact.getRepository() ); if ( !queryLayer.containsArtifact( artifact ) ) { // TODO: is this even possible? - addFailure( reporter, artifact, "missing-artifact", "Artifact does not exist in the repository" ); + addFailure( artifact, "missing-artifact", "Artifact does not exist in the repository" ); } if ( model != null && POM.equals( artifact.getType() ) ) { List dependencies = model.getDependencies(); - processDependencies( dependencies, reporter, queryLayer, artifact ); + processDependencies( dependencies, queryLayer, artifact ); } } - private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason ) + private void addFailure( Artifact artifact, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addFailure( artifact, ROLE_HINT, problem, reason ); + database.addFailure( artifact, ROLE_HINT, problem, reason ); } - private void processDependencies( List dependencies, ReportingDatabase reporter, - RepositoryQueryLayer repositoryQueryLayer, Artifact sourceArtifact ) + private void processDependencies( List dependencies, RepositoryQueryLayer repositoryQueryLayer, + Artifact sourceArtifact ) { if ( dependencies.size() > 0 ) { @@ -100,19 +105,19 @@ public class DependencyArtifactReportProcessor if ( !repositoryQueryLayer.containsArtifact( artifact ) ) { - String reason = MessageFormat.format( - "Artifact''s dependency {0} does not exist in the repository", - new String[]{getDependencyString( dependency )} ); - addFailure( reporter, sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ), - reason ); + String reason = MessageFormat + .format( "Artifact''s dependency {0} does not exist in the repository", + new String[] { getDependencyString( dependency ) } ); + addFailure( sourceArtifact, "missing-dependency:" + getDependencyKey( dependency ), reason ); } } catch ( InvalidVersionSpecificationException e ) { String reason = MessageFormat.format( "Artifact''s dependency {0} contains an invalid version {1}", - new String[]{getDependencyString( dependency ), - dependency.getVersion()} ); - addFailure( reporter, sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason ); + new String[] { + getDependencyString( dependency ), + dependency.getVersion() } ); + addFailure( sourceArtifact, "bad-version:" + getDependencyKey( dependency ), reason ); } } } @@ -156,7 +161,7 @@ public class DependencyArtifactReportProcessor } return artifactFactory.createDependencyArtifact( dependency.getGroupId(), dependency.getArtifactId(), spec, - dependency.getType(), dependency.getClassifier(), - dependency.getScope() ); + dependency.getType(), dependency.getClassifier(), dependency + .getScope() ); } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java index 4c15eab45..5dff8c50e 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/DuplicateArtifactFileReportProcessor.java @@ -27,7 +27,7 @@ import org.apache.maven.archiva.indexer.RepositoryIndexSearchException; import org.apache.maven.archiva.indexer.lucene.LuceneQuery; import 
org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord; import org.apache.maven.archiva.indexer.record.StandardIndexRecordFields; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.model.Model; @@ -62,9 +62,14 @@ public class DuplicateArtifactFileReportProcessor */ private String indexDirectory; + /** + * @plexus.requirement + */ + private ArtifactResultsDatabase database; + private static final String ROLE_HINT = "duplicate"; - public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter ) + public void processArtifact( Artifact artifact, Model model ) { ArtifactRepository repository = artifact.getRepository(); if ( artifact.getFile() != null ) @@ -82,16 +87,16 @@ public class DuplicateArtifactFileReportProcessor } catch ( DigesterException e ) { - addWarning( reporter, artifact, null, - "Unable to generate checksum for " + artifact.getFile() + ": " + e ); + addWarning( artifact, null, "Unable to generate checksum for " + artifact.getFile() + ": " + e ); } if ( checksum != null ) { try { - List results = index.search( new LuceneQuery( - new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) ); + List results = index + .search( new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum + .toLowerCase() ) ) ) ); if ( !results.isEmpty() ) { @@ -106,8 +111,7 @@ public class DuplicateArtifactFileReportProcessor String groupId = artifact.getGroupId(); if ( groupId.equals( result.getGroupId() ) ) { - addFailure( reporter, artifact, "duplicate", - "Found duplicate for " + artifact.getId() ); + addFailure( artifact, "duplicate", "Found duplicate for " + artifact.getId() ); } } } @@ -115,25 +119,25 @@ public class DuplicateArtifactFileReportProcessor } catch ( RepositoryIndexSearchException e ) { - addWarning( reporter, artifact, null, "Failed to search in index" + e ); + addWarning( artifact, null, "Failed to search in index" + e ); } } } else { - addWarning( reporter, artifact, null, "Artifact file is null" ); + addWarning( artifact, null, "Artifact file is null" ); } } - private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason ) + private void addFailure( Artifact artifact, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addFailure( artifact, ROLE_HINT, problem, reason ); + database.addFailure( artifact, ROLE_HINT, problem, reason ); } - private static void addWarning( ReportingDatabase reporter, Artifact artifact, String problem, String reason ) + private void addWarning( Artifact artifact, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addWarning( artifact, ROLE_HINT, problem, reason ); + database.addWarning( artifact, ROLE_HINT, problem, reason ); } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java index 1d4a6a64e..05977be86 100644 --- 
a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/InvalidPomArtifactReportProcessor.java @@ -20,7 +20,7 @@ package org.apache.maven.archiva.reporting.processor; */ import org.apache.commons.io.IOUtils; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.model.Model; @@ -43,19 +43,24 @@ public class InvalidPomArtifactReportProcessor { private static final String ROLE_HINT = "invalid-pom"; + /** + * @plexus.requirement + */ + private ArtifactResultsDatabase database; + /** * @param artifact The pom xml file to be validated, passed as an artifact object. * @param reporter The artifact reporter object. */ - public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter ) + public void processArtifact( Artifact artifact, Model model ) { ArtifactRepository repository = artifact.getRepository(); if ( !"file".equals( repository.getProtocol() ) ) { // We can't check other types of URLs yet. Need to use Wagon, with an exists() method. - throw new UnsupportedOperationException( - "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" ); + throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl() + + "'. Only file based repositories are supported" ); } if ( "pom".equals( artifact.getType().toLowerCase() ) ) @@ -64,7 +69,7 @@ public class InvalidPomArtifactReportProcessor if ( !f.exists() ) { - addFailure( reporter, artifact, "pom-missing", "POM not found." ); + addFailure( artifact, "pom-missing", "POM not found." ); } else { @@ -79,13 +84,12 @@ public class InvalidPomArtifactReportProcessor } catch ( XmlPullParserException e ) { - addFailure( reporter, artifact, "pom-parse-exception", + addFailure( artifact, "pom-parse-exception", "The pom xml file is not well-formed. 
Error while parsing: " + e.getMessage() ); } catch ( IOException e ) { - addFailure( reporter, artifact, "pom-io-exception", - "Error while reading the pom xml file: " + e.getMessage() ); + addFailure( artifact, "pom-io-exception", "Error while reading the pom xml file: " + e.getMessage() ); } finally { @@ -95,9 +99,9 @@ public class InvalidPomArtifactReportProcessor } } - private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason ) + private void addFailure( Artifact artifact, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addFailure( artifact, ROLE_HINT, problem, reason ); + database.addFailure( artifact, ROLE_HINT, problem, reason ); } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java index cfd594451..a8f5129b1 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/LocationArtifactReportProcessor.java @@ -20,14 +20,13 @@ package org.apache.maven.archiva.reporting.processor; */ import org.apache.commons.io.IOUtils; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.factory.ArtifactFactory; import org.apache.maven.artifact.handler.DefaultArtifactHandler; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.model.Model; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; -import org.apache.maven.project.MavenProjectBuilder; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import java.io.File; @@ -57,13 +56,19 @@ public class LocationArtifactReportProcessor private ArtifactFactory artifactFactory; // TODO: share with other code with the same - private static final Set JAR_FILE_TYPES = - new HashSet( Arrays.asList( new String[]{"jar", "war", "par", "ejb", "ear", "rar", "sar"} ) ); + private static final Set JAR_FILE_TYPES = new HashSet( Arrays.asList( new String[] { + "jar", + "war", + "par", + "ejb", + "ear", + "rar", + "sar" } ) ); /** * @plexus.requirement */ - private MavenProjectBuilder projectBuilder; + private ArtifactResultsDatabase database; private static final String POM = "pom"; @@ -77,15 +82,15 @@ public class LocationArtifactReportProcessor * location is valid based on the location specified in the pom. Check if the both the location * specified in the file system pom and in the pom included in the package is the same. */ - public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter ) + public void processArtifact( Artifact artifact, Model model ) { ArtifactRepository repository = artifact.getRepository(); if ( !"file".equals( repository.getProtocol() ) ) { // We can't check other types of URLs yet. Need to use Wagon, with an exists() method. - throw new UnsupportedOperationException( - "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" ); + throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl() + + "'. 
Only file based repositories are supported" ); } adjustDistributionArtifactHandler( artifact ); @@ -100,19 +105,16 @@ public class LocationArtifactReportProcessor { //check if the artifact is located in its proper location based on the info //specified in the model object/pom - Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(), - model.getArtifactId(), - model.getVersion(), - artifact.getType(), - artifact.getClassifier() ); + Artifact modelArtifact = artifactFactory.createArtifactWithClassifier( model.getGroupId(), model + .getArtifactId(), model.getVersion(), artifact.getType(), artifact.getClassifier() ); adjustDistributionArtifactHandler( modelArtifact ); String modelPath = repository.pathOf( modelArtifact ); if ( !modelPath.equals( artifactPath ) ) { - addFailure( reporter, artifact, "repository-pom-location", - "The artifact is out of place. It does not match the specified location in the repository pom: " + - modelPath ); + addFailure( artifact, "repository-pom-location", + "The artifact is out of place. It does not match the specified location in the repository pom: " + + modelPath ); } } } @@ -126,7 +128,7 @@ public class LocationArtifactReportProcessor { //unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself //check if the pom is included in the package - Model extractedModel = readArtifactModel( file, artifact, reporter ); + Model extractedModel = readArtifactModel( file, artifact ); if ( extractedModel != null ) { @@ -136,7 +138,7 @@ public class LocationArtifactReportProcessor extractedModel.getPackaging() ); if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) ) { - addFailure( reporter, artifact, "packaged-pom-location", + addFailure( artifact, "packaged-pom-location", "The artifact is out of place. It does not match the specified location in the packaged pom." ); } } @@ -144,15 +146,14 @@ public class LocationArtifactReportProcessor } else { - addFailure( reporter, artifact, "missing-artifact", - "The artifact file [" + file + "] cannot be found for metadata." ); + addFailure( artifact, "missing-artifact", "The artifact file [" + file + "] cannot be found for metadata." ); } } - private static void addFailure( ReportingDatabase reporter, Artifact artifact, String problem, String reason ) + private void addFailure( Artifact artifact, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addFailure( artifact, ROLE_HINT, problem, reason ); + database.addFailure( artifact, ROLE_HINT, problem, reason ); } private static void adjustDistributionArtifactHandler( Artifact artifact ) @@ -168,7 +169,7 @@ public class LocationArtifactReportProcessor } } - private Model readArtifactModel( File file, Artifact artifact, ReportingDatabase reporter ) + private Model readArtifactModel( File file, Artifact artifact ) { Model model = null; @@ -178,8 +179,8 @@ public class LocationArtifactReportProcessor jar = new JarFile( file ); //Get the entry and its input stream. - JarEntry entry = jar.getJarEntry( - "META-INF/maven/" + artifact.getGroupId() + "/" + artifact.getArtifactId() + "/pom.xml" ); + JarEntry entry = jar.getJarEntry( "META-INF/maven/" + artifact.getGroupId() + "/" + + artifact.getArtifactId() + "/pom.xml" ); // If the entry is not null, extract it. 
if ( entry != null ) @@ -198,11 +199,11 @@ public class LocationArtifactReportProcessor } catch ( IOException e ) { - addWarning( reporter, artifact, "Unable to read artifact to extract model: " + e ); + addWarning( artifact, "Unable to read artifact to extract model: " + e ); } catch ( XmlPullParserException e ) { - addWarning( reporter, artifact, "Unable to parse extracted model: " + e ); + addWarning( artifact, "Unable to parse extracted model: " + e ); } finally { @@ -222,10 +223,10 @@ public class LocationArtifactReportProcessor return model; } - private static void addWarning( ReportingDatabase reporter, Artifact artifact, String reason ) + private void addWarning( Artifact artifact, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addWarning( artifact, ROLE_HINT, null, reason ); + database.addWarning( artifact, ROLE_HINT, null, reason ); } private Model readModel( InputStream entryStream ) diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java index dc6790abc..ad8465028 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/MetadataReportProcessor.java @@ -19,7 +19,6 @@ package org.apache.maven.archiva.reporting.processor; * under the License. */ -import org.apache.maven.archiva.reporting.database.ReportingDatabase; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; @@ -30,5 +29,5 @@ public interface MetadataReportProcessor { String ROLE = MetadataReportProcessor.class.getName(); - void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ReportingDatabase reporter ); + void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository ); } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java index 21bd7b4dc..b5f0817cb 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldArtifactReportProcessor.java @@ -19,7 +19,7 @@ package org.apache.maven.archiva.reporting.processor; * under the License. */ -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.handler.DefaultArtifactHandler; import org.apache.maven.artifact.repository.ArtifactRepository; @@ -44,8 +44,15 @@ public class OldArtifactReportProcessor * @plexus.configuration default-value="31536000" */ private int maxAge; + + /** + * TODO: Must create an 'Old Artifact' database. + * TODO: Base this off of an artifact table query instead. 
+ * @plexus.requirement + */ + private ArtifactResultsDatabase database; - public void processArtifact( Artifact artifact, Model model, ReportingDatabase reporter ) + public void processArtifact( Artifact artifact, Model model ) { ArtifactRepository repository = artifact.getRepository(); @@ -68,7 +75,7 @@ public class OldArtifactReportProcessor if ( System.currentTimeMillis() - file.lastModified() > maxAge * 1000 ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addNotice( artifact, ROLE_HINT, "old-artifact", + database.addNotice( artifact, ROLE_HINT, "old-artifact", "The artifact is older than the maximum age of " + maxAge + " seconds." ); } } diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java index 04720762d..02c2cecb0 100644 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java +++ b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/processor/OldSnapshotArtifactReportProcessor.java @@ -19,7 +19,7 @@ package org.apache.maven.archiva.reporting.processor; * under the License. */ -import org.apache.maven.archiva.reporting.database.ReportingDatabase; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.handler.DefaultArtifactHandler; import org.apache.maven.artifact.repository.ArtifactRepository; @@ -62,15 +62,22 @@ public class OldSnapshotArtifactReportProcessor */ private int maxSnapshots; - public void processArtifact( final Artifact artifact, Model model, ReportingDatabase reporter ) + /** + * TODO: Must create an 'Old Artifact' database. + * TODO: Base this off of an artifact table query instead. + * @plexus.requirement + */ + private ArtifactResultsDatabase database; + + public void processArtifact( final Artifact artifact, Model model ) { ArtifactRepository repository = artifact.getRepository(); if ( !"file".equals( repository.getProtocol() ) ) { // We can't check other types of URLs yet. Need to use Wagon, with an exists() method. - throw new UnsupportedOperationException( - "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" ); + throw new UnsupportedOperationException( "Can't process repository '" + repository.getUrl() + + "'. Only file based repositories are supported" ); } adjustDistributionArtifactHandler( artifact ); @@ -95,13 +102,14 @@ public class OldSnapshotArtifactReportProcessor catch ( ParseException e ) { throw new IllegalStateException( - "Shouldn't match timestamp pattern and not be able to parse it: " + m.group( 2 ) ); + "Shouldn't match timestamp pattern and not be able to parse it: " + + m.group( 2 ) ); } if ( System.currentTimeMillis() - timestamp > maxAge * 1000 ) { - addNotice( reporter, artifact, "snapshot-expired-time", - "The artifact is older than the maximum age of " + maxAge + " seconds." ); + addNotice( artifact, "snapshot-expired-time", "The artifact is older than the maximum age of " + + maxAge + " seconds." 
); } else if ( maxSnapshots > 0 ) { @@ -109,12 +117,12 @@ public class OldSnapshotArtifactReportProcessor { public boolean accept( File file, String string ) { - return string.startsWith( artifact.getArtifactId() + "-" ) && - string.endsWith( "." + artifact.getArtifactHandler().getExtension() ); + return string.startsWith( artifact.getArtifactId() + "-" ) + && string.endsWith( "." + artifact.getArtifactHandler().getExtension() ); } } ); - List/**/ buildNumbers = new ArrayList(); + List/**/buildNumbers = new ArrayList(); Integer currentBuild = null; for ( Iterator i = Arrays.asList( files ).iterator(); i.hasNext(); ) { @@ -149,7 +157,7 @@ public class OldSnapshotArtifactReportProcessor if ( buildNumbers.contains( currentBuild ) ) { - addNotice( reporter, artifact, "snapshot-expired-count", + addNotice( artifact, "snapshot-expired-count", "The artifact is older than the maximum number of retained snapshot builds." ); } } @@ -162,10 +170,10 @@ public class OldSnapshotArtifactReportProcessor } } - private static void addNotice( ReportingDatabase reporter, Artifact artifact, String problem, String reason ) + private void addNotice( Artifact artifact, String problem, String reason ) { // TODO: reason could be an i18n key derived from the processor and the problem ID and the - reporter.addNotice( artifact, ROLE_HINT, problem, reason ); + database.addNotice( artifact, ROLE_HINT, problem, reason ); } private static void adjustDistributionArtifactHandler( Artifact artifact ) diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java deleted file mode 100644 index 0d3c81366..000000000 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/DefaultReportingStore.java +++ /dev/null @@ -1,143 +0,0 @@ -package org.apache.maven.archiva.reporting.store; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import org.apache.commons.io.IOUtils; -import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.archiva.reporting.model.Reporting; -import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Reader; -import org.apache.maven.archiva.reporting.model.io.xpp3.ReportingXpp3Writer; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.codehaus.plexus.logging.AbstractLogEnabled; -import org.codehaus.plexus.util.xml.pull.XmlPullParserException; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.FileWriter; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -/** - * Load and store the reports. No synchronization is used, but it is unnecessary as the old object - * can continue to be used. - * - * @author Brett Porter - * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store - * @todo support other implementations than XML file - * @plexus.component - */ -public class DefaultReportingStore - extends AbstractLogEnabled - implements ReportingStore -{ - /** - * The cached reports for given repositories. - */ - private Map/**/ reports = new HashMap(); - - public ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup ) - throws ReportingStoreException - { - String key = getKey( repository, reportGroup ); - ReportingDatabase database = (ReportingDatabase) reports.get( key ); - - if ( database == null ) - { - ReportingXpp3Reader reader = new ReportingXpp3Reader(); - - File file = getReportFilename( repository, reportGroup ); - - FileReader fileReader = null; - try - { - fileReader = new FileReader( file ); - } - catch ( FileNotFoundException e ) - { - database = new ReportingDatabase( reportGroup, repository ); - } - - if ( database == null ) - { - getLogger().info( "Reading report database from " + file ); - try - { - Reporting reporting = reader.read( fileReader, false ); - database = new ReportingDatabase( reportGroup, reporting, repository ); - } - catch ( IOException e ) - { - throw new ReportingStoreException( e.getMessage(), e ); - } - catch ( XmlPullParserException e ) - { - throw new ReportingStoreException( e.getMessage(), e ); - } - finally - { - IOUtils.closeQuietly( fileReader ); - } - } - - reports.put( key, database ); - } - return database; - } - - private static String getKey( ArtifactRepository repository, ReportGroup reportGroup ) - { - return repository.getId() + "/" + reportGroup.getFilename(); - } - - private static File getReportFilename( ArtifactRepository repository, ReportGroup reportGroup ) - { - return new File( repository.getBasedir(), ".reports/" + reportGroup.getFilename() ); - } - - public void storeReports( ReportingDatabase database, ArtifactRepository repository ) - throws ReportingStoreException - { - database.updateTimings(); - - ReportingXpp3Writer writer = new ReportingXpp3Writer(); - - File file = getReportFilename( repository, database.getReportGroup() ); - getLogger().info( "Writing reports to " + file ); - FileWriter fileWriter = null; - try - { - file.getParentFile().mkdirs(); - - fileWriter = new FileWriter( file ); - writer.write( fileWriter, database.getReporting() ); - } - catch ( IOException e ) - { - throw new ReportingStoreException( e.getMessage(), e ); - } - finally - { - IOUtils.closeQuietly( fileWriter ); - } - } -} diff 
--git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java deleted file mode 100644 index a7ead7628..000000000 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStore.java +++ /dev/null @@ -1,60 +0,0 @@ -package org.apache.maven.archiva.reporting.store; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.artifact.repository.ArtifactRepository; - -/** - * A component for loading the reporting database into the model. - * - * @author Brett Porter - * @todo this is something that could possibly be generalised into Modello. - */ -public interface ReportingStore -{ - /** - * The Plexus role for the component. - */ - String ROLE = ReportingStore.class.getName(); - - /** - * Get the reports from the store. A cached version may be used. - * - * @param repository the repository to load the reports for - * @param reportGroup the report group to get the report for - * @return the reporting database - * @throws ReportingStoreException if there was a problem reading the store - */ - ReportingDatabase getReportsFromStore( ArtifactRepository repository, ReportGroup reportGroup ) - throws ReportingStoreException; - - /** - * Save the reporting to the store. - * - * @param database the reports to store - * @param repository the repositorry to store the reports in - * @throws ReportingStoreException if there was a problem writing the store - */ - void storeReports( ReportingDatabase database, ArtifactRepository repository ) - throws ReportingStoreException; - -} diff --git a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java b/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java deleted file mode 100644 index e3f08800b..000000000 --- a/archiva-reports-standard/src/main/java/org/apache/maven/archiva/reporting/store/ReportingStoreException.java +++ /dev/null @@ -1,39 +0,0 @@ -package org.apache.maven.archiva.reporting.store; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Exception occurring using the reporting store. - * - * @author Brett Porter - */ -public class ReportingStoreException - extends Exception -{ - public ReportingStoreException( String message ) - { - super( message ); - } - - public ReportingStoreException( String message, Throwable e ) - { - super( message, e ); - } -} diff --git a/archiva-reports-standard/src/main/mdo/reporting.mdo b/archiva-reports-standard/src/main/mdo/reporting.mdo index 915faaa31..8b41251ca 100644 --- a/archiva-reports-standard/src/main/mdo/reporting.mdo +++ b/archiva-reports-standard/src/main/mdo/reporting.mdo @@ -1,3 +1,4 @@ + - - - - - + + mail/Session + + + + + + + localhost + 25 + + + + + + + + jdbc/archiva + + + target/databases/archiva + sa + create + + + + + + jdbc/archivaShutdown + + + target/databases/archiva + sa + shutdown + + + + + jdbc/users - target/database + target/databases/users sa create - - jdbc/shutdown + + + jdbc/usersShutdown - target/database + target/databases/users sa shutdown diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ReportsAction.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ReportsAction.java index de64abe26..952cc674c 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ReportsAction.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ReportsAction.java @@ -19,31 +19,15 @@ package org.apache.maven.archiva.web.action; * under the License. */ -import com.opensymphony.xwork.Preparable; -import org.apache.maven.archiva.configuration.ArchivaConfiguration; -import org.apache.maven.archiva.configuration.Configuration; -import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory; -import org.apache.maven.archiva.configuration.RepositoryConfiguration; -import org.apache.maven.archiva.discoverer.DiscovererException; -import org.apache.maven.archiva.discoverer.filter.AcceptAllArtifactFilter; -import org.apache.maven.archiva.discoverer.filter.SnapshotArtifactFilter; import org.apache.maven.archiva.reporting.database.ReportingDatabase; -import org.apache.maven.archiva.reporting.executor.ReportExecutor; -import org.apache.maven.archiva.reporting.group.ReportGroup; -import org.apache.maven.archiva.reporting.store.ReportingStoreException; import org.apache.maven.archiva.security.ArchivaRoleConstants; -import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.resolver.filter.ArtifactFilter; import org.codehaus.plexus.security.rbac.Resource; import org.codehaus.plexus.security.ui.web.interceptor.SecureAction; import org.codehaus.plexus.security.ui.web.interceptor.SecureActionBundle; import org.codehaus.plexus.security.ui.web.interceptor.SecureActionException; import org.codehaus.plexus.xwork.action.PlexusActionSupport; -import java.util.ArrayList; -import java.util.Iterator; import java.util.List; -import java.util.Map; /** * Repository reporting. 
@@ -53,185 +37,23 @@ import java.util.Map; */ public class ReportsAction extends PlexusActionSupport - implements Preparable, SecureAction + implements SecureAction { /** * @plexus.requirement */ - private ArchivaConfiguration archivaConfiguration; + private ReportingDatabase database; - /** - * @plexus.requirement - */ - private ConfiguredRepositoryFactory factory; - - private List databases; - - private String repositoryId; - - /** - * @plexus.requirement - */ - private ReportExecutor executor; - - private Configuration configuration; - - /** - * @plexus.requirement role="org.apache.maven.archiva.reporting.group.ReportGroup" - */ - private Map reports; - - private String reportGroup = DEFAULT_REPORT_GROUP; - - private static final String DEFAULT_REPORT_GROUP = "health"; - - private String filter; + private List reports; public String execute() throws Exception { - ReportGroup reportGroup = (ReportGroup) reports.get( this.reportGroup ); - - databases = new ArrayList(); - - if ( repositoryId != null && !repositoryId.equals( "-" ) ) - { - RepositoryConfiguration repositoryConfiguration = configuration.getRepositoryById( repositoryId ); - getReport( repositoryConfiguration, reportGroup ); - } - else - { - for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); ) - { - RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next(); - - getReport( repositoryConfiguration, reportGroup ); - } - } - return SUCCESS; - } - - private void getReport( RepositoryConfiguration repositoryConfiguration, ReportGroup reportGroup ) - throws ReportingStoreException - { - ArtifactRepository repository = factory.createRepository( repositoryConfiguration ); - - ReportingDatabase database = executor.getReportDatabase( repository, reportGroup ); - - if ( filter != null && !filter.equals( "-" ) ) - { - database = database.getFilteredDatabase( filter ); - } - - databases.add( database ); - } - - public String runReport() - throws Exception - { - ReportGroup reportGroup = (ReportGroup) reports.get( this.reportGroup ); - - RepositoryConfiguration repositoryConfiguration = configuration.getRepositoryById( repositoryId ); - ArtifactRepository repository = factory.createRepository( repositoryConfiguration ); - - ReportingDatabase database = executor.getReportDatabase( repository, reportGroup ); - if ( database.isInProgress() ) - { - return SUCCESS; - } - - generateReport( database, repositoryConfiguration, reportGroup, repository ); - + reports = database.getArtifactDatabase().getAllArtifactResults(); + return SUCCESS; } - private void generateReport( ReportingDatabase database, RepositoryConfiguration repositoryConfiguration, - ReportGroup reportGroup, ArtifactRepository repository ) - throws DiscovererException, ReportingStoreException - { - database.setInProgress( true ); - - List blacklistedPatterns = new ArrayList(); - if ( repositoryConfiguration.getBlackListPatterns() != null ) - { - blacklistedPatterns.addAll( repositoryConfiguration.getBlackListPatterns() ); - } - if ( configuration.getGlobalBlackListPatterns() != null ) - { - blacklistedPatterns.addAll( configuration.getGlobalBlackListPatterns() ); - } - - ArtifactFilter filter; - if ( repositoryConfiguration.isIncludeSnapshots() ) - { - filter = new AcceptAllArtifactFilter(); - } - else - { - filter = new SnapshotArtifactFilter(); - } - - try - { - executor.runReports( reportGroup, repository, blacklistedPatterns, filter ); - } - finally - { - database.setInProgress( false ); - } - } - - public void setReportGroup( 
String reportGroup ) - { - this.reportGroup = reportGroup; - } - - public String getReportGroup() - { - return reportGroup; - } - - public String getRepositoryId() - { - return repositoryId; - } - - public void setRepositoryId( String repositoryId ) - { - this.repositoryId = repositoryId; - } - - public List getDatabases() - { - return databases; - } - - public void prepare() - throws Exception - { - configuration = archivaConfiguration.getConfiguration(); - } - - public Configuration getConfiguration() - { - return configuration; - } - - public Map getReports() - { - return reports; - } - - public String getFilter() - { - return filter; - } - - public void setFilter( String filter ) - { - this.filter = filter; - } - public SecureActionBundle getSecureActionBundle() throws SecureActionException { @@ -242,4 +64,9 @@ public class ReportsAction return bundle; } + + public List getReports() + { + return reports; + } } diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ShowArtifactAction.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ShowArtifactAction.java index f8f8ca318..ecc109a86 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ShowArtifactAction.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/ShowArtifactAction.java @@ -32,6 +32,7 @@ import org.apache.maven.archiva.indexer.RepositoryIndexSearchException; import org.apache.maven.archiva.indexer.lucene.LuceneQuery; import org.apache.maven.archiva.indexer.record.StandardArtifactIndexRecord; import org.apache.maven.archiva.proxy.ProxyException; +import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase; import org.apache.maven.archiva.web.util.VersionMerger; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.factory.ArtifactFactory; @@ -111,6 +112,11 @@ public class ShowArtifactAction * @plexus.requirement */ private DependencyTreeBuilder dependencyTreeBuilder; + + /** + * @plexus.requirement + */ + ArtifactResultsDatabase artifactsDatabase; private String groupId; @@ -131,6 +137,8 @@ public class ShowArtifactAction private String artifactPath; private List mailingLists; + + private List reports; public String artifact() throws IOException, XmlPullParserException, ProjectBuildingException, ResourceDoesNotExistException, @@ -182,6 +190,21 @@ public class ShowArtifactAction return SUCCESS; } + + public String reports() + throws IOException, XmlPullParserException, ProjectBuildingException + { + if ( !checkParameters() ) + { + return ERROR; + } + + System.out.println("#### In reports."); + this.reports = artifactsDatabase.findArtifactResults( groupId, artifactId, version ); + System.out.println("#### Found " + reports.size() + " reports."); + + return SUCCESS; + } public String dependees() throws IOException, XmlPullParserException, ProjectBuildingException, RepositoryIndexException, @@ -518,4 +541,9 @@ public class ShowArtifactAction { return repositoryUrlName; } + + public List getReports() + { + return reports; + } } diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/ConfigureAction.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/ConfigureAction.java index 34e3a981a..a2cdcffb4 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/ConfigureAction.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/ConfigureAction.java @@ -22,12 +22,13 @@ package 
org.apache.maven.archiva.web.action.admin; import com.opensymphony.xwork.ModelDriven; import com.opensymphony.xwork.Preparable; import com.opensymphony.xwork.Validateable; + import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.Configuration; import org.apache.maven.archiva.configuration.InvalidConfigurationException; import org.apache.maven.archiva.indexer.RepositoryIndexException; import org.apache.maven.archiva.indexer.RepositoryIndexSearchException; -import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor; +import org.apache.maven.archiva.repositories.ActiveManagedRepositories; import org.apache.maven.archiva.security.ArchivaRoleConstants; import org.codehaus.plexus.registry.RegistryException; import org.codehaus.plexus.scheduler.CronExpressionValidator; @@ -56,15 +57,15 @@ public class ConfigureAction private ArchivaConfiguration archivaConfiguration; /** - * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer" + * @plexus.requirement */ - private IndexerTaskExecutor indexer; + private ActiveManagedRepositories activeRepositories; /** * The configuration. */ private Configuration configuration; - + private CronExpressionValidator cronValidator; private String second = "0"; @@ -100,7 +101,7 @@ public class ConfigureAction { // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded // TODO: if this is changed, do we move the index or recreate it? - configuration.setIndexerCronExpression( getCronExpression() ); + configuration.setDataRefreshCronExpression( getCronExpression() ); // Normalize the path File file = new File( configuration.getIndexPath() ); @@ -125,7 +126,7 @@ public class ConfigureAction public String input() { - String[] cronEx = configuration.getIndexerCronExpression().split( " " ); + String[] cronEx = configuration.getDataRefreshCronExpression().split( " " ); int i = 0; while ( i < cronEx.length ) @@ -157,9 +158,9 @@ public class ConfigureAction i++; } - if ( indexer.getLastIndexingTime() != 0 ) + if ( activeRepositories.getLastDataRefreshTime() != 0 ) { - lastIndexingTime = new Date( indexer.getLastIndexingTime() ).toString(); + lastIndexingTime = new Date( activeRepositories.getLastDataRefreshTime() ).toString(); } else { diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/RunRepositoryTaskAction.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/RunRepositoryTaskAction.java index c0fa12f82..06daf24a9 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/RunRepositoryTaskAction.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/action/admin/RunRepositoryTaskAction.java @@ -20,12 +20,12 @@ package org.apache.maven.archiva.web.action.admin; */ import org.apache.maven.archiva.scheduler.RepositoryTaskScheduler; -import org.apache.maven.archiva.scheduler.TaskExecutionException; import org.apache.maven.archiva.security.ArchivaRoleConstants; import org.codehaus.plexus.security.rbac.Resource; import org.codehaus.plexus.security.ui.web.interceptor.SecureAction; import org.codehaus.plexus.security.ui.web.interceptor.SecureActionBundle; import org.codehaus.plexus.security.ui.web.interceptor.SecureActionException; +import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; import org.codehaus.plexus.xwork.action.PlexusActionSupport; /** @@ -42,10 +42,10 @@ public class 
RunRepositoryTaskAction */ private RepositoryTaskScheduler taskScheduler; - public String runIndexer() + public String runRefresh() throws TaskExecutionException { - taskScheduler.runIndexer(); + taskScheduler.runDataRefresh(); return SUCCESS; } diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/AuditLog.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/AuditLog.java index 93568a2ae..6ae6b8a49 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/AuditLog.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/AuditLog.java @@ -35,7 +35,7 @@ import java.util.Date; /** * AuditLog - Audit Log. * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ * * @plexus.component role="org.apache.maven.archiva.web.repository.AuditLog" diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/ProxiedDavServer.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/ProxiedDavServer.java index 77e45ebfd..db43e54b7 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/ProxiedDavServer.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/ProxiedDavServer.java @@ -49,7 +49,7 @@ import java.util.List; /** * ProxiedDavServer * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ * @plexus.component role="org.codehaus.plexus.webdav.DavServerComponent" * role-hint="proxied" diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/RepositoryServlet.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/RepositoryServlet.java index 687dae1f2..cb3184443 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/RepositoryServlet.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/repository/RepositoryServlet.java @@ -52,7 +52,7 @@ import java.util.List; /** * RepositoryServlet * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ */ public class RepositoryServlet diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifact.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifact.java index 226f27c38..79c4b95b6 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifact.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifact.java @@ -25,7 +25,7 @@ import com.opensymphony.xwork.util.OgnlValueStack; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; -import org.apache.maven.archiva.artifact.ManagedArtifact; +import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact; import org.apache.maven.archiva.configuration.RepositoryConfiguration; import org.apache.maven.archiva.repositories.ActiveManagedRepositories; import org.apache.maven.project.ProjectBuildingException; @@ -44,7 +44,7 @@ import javax.servlet.jsp.PageContext; /** * DownloadArtifact * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ * * @plexus.component role="com.opensymphony.webwork.components.Component" role-hint="download-artifact" diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifactTag.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifactTag.java index 7127acad9..21aaa982f 100644 --- 
a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifactTag.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/DownloadArtifactTag.java @@ -27,7 +27,7 @@ import javax.servlet.jsp.tagext.TagSupport; /** * DownloadArtifactTag * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ */ public class DownloadArtifactTag diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/ExpressionTool.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/ExpressionTool.java index 8efab8a18..b59c5636f 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/ExpressionTool.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/ExpressionTool.java @@ -29,7 +29,7 @@ import javax.servlet.jsp.tagext.Tag; /** * ExpressionTool * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ */ public class ExpressionTool diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLink.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLink.java index 9d2e5d6f8..75b3b8081 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLink.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLink.java @@ -33,7 +33,7 @@ import javax.servlet.http.HttpServletResponse; /** * GroupIdLink * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ */ public class GroupIdLink diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLinkTag.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLinkTag.java index 462d6ae6f..ec42b8bbe 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLinkTag.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/GroupIdLinkTag.java @@ -29,7 +29,7 @@ import javax.servlet.jsp.tagext.TagSupport; /** * GroupIdLink * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ */ public class GroupIdLinkTag diff --git a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/PlexusTagUtil.java b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/PlexusTagUtil.java index 2b730c6bb..a56068a30 100644 --- a/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/PlexusTagUtil.java +++ b/archiva-webapp/src/main/java/org/apache/maven/archiva/web/tags/PlexusTagUtil.java @@ -29,7 +29,7 @@ import javax.servlet.jsp.PageContext; /** * PlexusTagUtil * - * @author Joakim Erdfelt + * @author Joakim Erdfelt * @version $Id$ */ public class PlexusTagUtil diff --git a/archiva-webapp/src/main/resources/META-INF/plexus/application.xml b/archiva-webapp/src/main/resources/META-INF/plexus/application.xml index 62b65f63e..9b0d313bf 100644 --- a/archiva-webapp/src/main/resources/META-INF/plexus/application.xml +++ b/archiva-webapp/src/main/resources/META-INF/plexus/application.xml @@ -71,6 +71,60 @@
+ + org.codehaus.plexus.jdo.JdoFactory + archiva + org.codehaus.plexus.jdo.DataSourceConfigurableJdoFactory + + + java:comp/env/jdbc/archiva + java:comp/env/jdbc/archivaShutdown + + + org.jpox.PersistenceManagerFactoryImpl + + + org.jpox.autoCreateSchema + true + + + org.jpox.validateTables + false + + + org.jpox.validateConstraints + false + + + org.jpox.validateColumns + false + + + org.jpox.autoStartMechanism + None + + + org.jpox.transactionIsolation + READ_UNCOMMITTED + + + org.jpox.poid.transactionIsolation + READ_UNCOMMITTED + + + org.jpox.rdbms.dateTimezone + JDK_DEFAULT_TIMEZONE + + + + + + org.codehaus.plexus.jdo.JdoFactory users @@ -115,12 +169,12 @@ org.jpox.rdbms.dateTimezone JDK_DEFAULT_TIMEZONE - + --> @@ -130,101 +184,9 @@ --> org.codehaus.plexus.logging.LoggerManager - org.codehaus.plexus.logging.log4j.Log4JLoggerManager + org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager basic - - - WARN - console,rolling - - - - console - DEBUG - org.apache.log4j.ConsoleAppender - %d [%t] %-5p %-30c{1} - %m%n - - - - rolling - DEBUG - org.apache.log4j.DailyRollingFileAppender - %-4r [%t] %-5p %c %x - %m%n - - - - file - ${appserver.base}/logs/archiva.log - - - append - true - - - datePattern - '.'yyyy-MM-dd - - - - - - - - - org.apache.maven - INFO - - - org.codehaus.plexus.security - INFO - - - - org.codehaus.plexus.mailsender.MailSender - INFO - - - org.quartz - INFO - - - org.apache.jasper - INFO - - - com.opensymphony.xwork - INFO - - - com.opensymphony.webwork - INFO - - - org.codehaus.plexus.PlexusContainer - INFO - - - JPOX - WARN - - - JPOX.MetaData - ERROR - - - - freemarker - WARN - - - -
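
The hunk above declares a second JdoFactory component (role-hint "archiva") backed by org.codehaus.plexus.jdo.DataSourceConfigurableJdoFactory: connections come from the java:comp/env/jdbc/archiva JNDI datasource, and JPOX is configured to auto-create the schema, skip validation, and use READ_UNCOMMITTED isolation. A component that needs this store would look the factory up by role-hint and open a PersistenceManager per operation. A minimal sketch of such a consumer, assuming only the plexus-jdo JdoFactory API; the class, field, and method names below are illustrative and not part of this commit:

    import java.util.Collection;

    import javax.jdo.PersistenceManager;
    import javax.jdo.Query;

    import org.codehaus.plexus.jdo.JdoFactory;

    /**
     * Illustrative consumer of the "archiva" JdoFactory declared above (not part of this commit).
     */
    public class JdoStoreSketch
    {
        /**
         * @plexus.requirement role-hint="archiva"
         */
        private JdoFactory jdoFactory;

        public int countAll( Class persistentClass )
        {
            PersistenceManager pm = jdoFactory.getPersistenceManagerFactory().getPersistenceManager();
            try
            {
                // Query every instance of the persistent class and report how many exist.
                Query query = pm.newQuery( persistentClass );
                Collection results = (Collection) query.execute();
                return results.size();
            }
            finally
            {
                pm.close();
            }
        }
    }

Because the datasource is resolved through JNDI, the servlet container has to define jdbc/archiva (and jdbc/archivaShutdown) for this factory to start.
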
@@ -233,7 +195,7 @@ org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor - indexer + data-refresh diff --git a/archiva-webapp/src/main/resources/log4j.xml b/archiva-webapp/src/main/resources/log4j.xml new file mode 100644 index 000000000..9875c602e --- /dev/null +++ b/archiva-webapp/src/main/resources/log4j.xml @@ -0,0 +1,80 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/archiva-webapp/src/main/resources/xwork.xml b/archiva-webapp/src/main/resources/xwork.xml index f89edbe4b..703e922fc 100644 --- a/archiva-webapp/src/main/resources/xwork.xml +++ b/archiva-webapp/src/main/resources/xwork.xml @@ -92,6 +92,10 @@ login /security + + login + /security + login /security @@ -177,6 +181,10 @@ /WEB-INF/jsp/showArtifact.jsp + + /WEB-INF/jsp/showArtifact.jsp + + /WEB-INF/jsp/showArtifact.jsp diff --git a/archiva-webapp/src/main/webapp/WEB-INF/jsp/include/artifactReports.jspf b/archiva-webapp/src/main/webapp/WEB-INF/jsp/include/artifactReports.jspf new file mode 100644 index 000000000..1a9d41638 --- /dev/null +++ b/archiva-webapp/src/main/webapp/WEB-INF/jsp/include/artifactReports.jspf @@ -0,0 +1,38 @@ +<%-- + ~ Licensed to the Apache Software Foundation (ASF) under one + ~ or more contributor license agreements. See the NOTICE file + ~ distributed with this work for additional information + ~ regarding copyright ownership. The ASF licenses this file + ~ to you under the Apache License, Version 2.0 (the + ~ "License"); you may not use this file except in compliance + ~ with the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, + ~ software distributed under the License is distributed on an + ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + ~ KIND, either express or implied. See the License for the + ~ specific language governing permissions and limitations + ~ under the License. + --%> + +<%@ taglib prefix="ww" uri="/webwork" %> +<%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %> +<%@ taglib prefix="my" tagdir="/WEB-INF/tags" %> + + +

+ ${report.groupId} : ${report.artifactId} : ${report.version} : ${report.classifier} : ${report.type} +

+
    + +
  • + ${result.reason} +
  • +
    +
+
+ + No reports for this artifact. + diff --git a/archiva-webapp/src/main/webapp/WEB-INF/jsp/reports/reports.jsp b/archiva-webapp/src/main/webapp/WEB-INF/jsp/reports/reports.jsp index 5d73c0cd7..33d342ae0 100644 --- a/archiva-webapp/src/main/webapp/WEB-INF/jsp/reports/reports.jsp +++ b/archiva-webapp/src/main/webapp/WEB-INF/jsp/reports/reports.jsp @@ -26,9 +26,7 @@ - - - Report: ${reports[reportGroup].name} + Reports @@ -38,135 +36,22 @@
- - - - - - - - - - - -
-
- <%-- TODO! - Repair all - | - --%> - - - - - ${database.repository.id} - - - Regenerate Report - - - - - Report in progress - - -
-

Repository: ${database.repository.name}

- -

- - - Status: - " width="15" height="15" alt=""/> - ${database.numFailures} - " width="15" height="15" alt=""/> - ${database.numWarnings} - " width="15" height="15" alt=""/> - ${database.numNotices} - - - - - Last updated: , - execution time: minutes - seconds - - - - - This report has not yet been generated. Generate Report - - - -

- - <%-- TODO need to protect iterations against concurrent modification exceptions by cloning the lists synchronously --%> - <%-- TODO! paginate (displaytag?) --%> - -

Artifacts

- -
    - -
  • ${result.reason}
  • -
    - -
  • ${result.reason}
  • -
    - -
  • ${result.reason}
  • -
    -
-

- -

- <%-- TODO! - - Repair - - --%> -
- -

- ... more ... -

-
-
- -

Metadata

- -
    - -
  • ${result.reason}
  • -
    - -
  • ${result.reason}
  • -
    - -
  • ${result.reason}
  • -
    -
-

- -

- <%-- TODO! - - Repair - - --%> -
- -

- ... more ... -

-
-
-
+ +

+ ${report.groupId} : ${report.artifactId} : ${report.version} : ${report.classifier} : ${report.type} +

+
    + +
  • + ${result.reason} +
  • +
    +
+ + No reports for any artifact. + +
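
Both the rewritten reports.jsp above and the new artifactReports.jspf render the flat reports list that the actions expose through getReports(); for the per-artifact view that list comes from ArtifactResultsDatabase.findArtifactResults( groupId, artifactId, version ) in ShowArtifactAction, and each entry carries the reason text shown as ${result.reason}. A minimal sketch of another consumer of that lookup, assuming only the method signature already used by the action; the sketch's class and method names are illustrative:

    import java.util.List;

    import org.apache.maven.archiva.reporting.database.ArtifactResultsDatabase;

    /**
     * Illustrative consumer of ArtifactResultsDatabase (not part of this commit).
     */
    public class ArtifactReportSummarySketch
    {
        /**
         * @plexus.requirement
         */
        private ArtifactResultsDatabase artifactsDatabase;

        /**
         * Count the report results recorded for one artifact, treating a missing
         * list the same way the JSP's "No reports" branch treats an empty one.
         */
        public int countResults( String groupId, String artifactId, String version )
        {
            List results = artifactsDatabase.findArtifactResults( groupId, artifactId, version );

            return ( results == null ) ? 0 : results.size();
        }
    }
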
diff --git a/archiva-webapp/src/main/webapp/WEB-INF/jsp/showArtifact.jsp b/archiva-webapp/src/main/webapp/WEB-INF/jsp/showArtifact.jsp index 60299d7d3..0ba14bb84 100644 --- a/archiva-webapp/src/main/webapp/WEB-INF/jsp/showArtifact.jsp +++ b/archiva-webapp/src/main/webapp/WEB-INF/jsp/showArtifact.jsp @@ -21,6 +21,7 @@ <%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %> <%@ taglib prefix="my" tagdir="/WEB-INF/tags" %> <%@ taglib prefix="archiva" uri="http://maven.apache.org/archiva" %> +<%@ taglib prefix="pss" uri="/plexusSecuritySystem" %> @@ -115,6 +116,17 @@ Mailing Lists + + + + + + + + + Reports + + @@ -135,6 +147,9 @@ <%@ include file="/WEB-INF/jsp/include/mailingLists.jspf" %> + + <%@ include file="/WEB-INF/jsp/include/artifactReports.jspf" %> + <%@ include file="/WEB-INF/jsp/include/artifactInfo.jspf" %> diff --git a/pom.xml b/pom.xml index 124dcc2b1..897821db7 100644 --- a/pom.xml +++ b/pom.xml @@ -24,6 +24,9 @@ 4 ../pom/maven/pom.xml + + 2.0.5 + org.apache.maven.archiva archiva pom @@ -81,9 +84,9 @@ 1.3.3 + generate descriptor - merge-descriptors @@ -120,6 +123,7 @@ archiva-applet archiva-converter + archiva-common archiva-discoverer archiva-reports-standard archiva-indexer @@ -154,30 +158,6 @@ maven-app-configuration-web 1.0-SNAPSHOT
- org.codehaus.plexus plexus-container-default @@ -243,6 +223,20 @@ wagon-http-lightweight ${wagon.version} + + org.apache.maven.archiva + archiva-common + ${pom.version} + + org.apache.maven.archiva archiva-core @@ -519,17 +513,20 @@ - - - - codehaus.org - http://snapshots.repository.codehaus.org - - - codehaus.org + http://repository.codehaus.org + + true + + + false + + + + + snapshots.codehaus.org http://snapshots.repository.codehaus.org false @@ -539,6 +536,19 @@ + + + + snapshots.codehaus.org + http://snapshots.repository.codehaus.org + + false + + + true + + + 2.0.5 1.0-beta-2
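
Beyond the build changes in the root pom (the new archiva-common module, the managed archiva-common dependency at ${pom.version}, the maven.version property, and the reorganised codehaus repositories), the functional thread of this merge is the rename of the old indexer run to a data refresh: the task executor role-hint becomes data-refresh, RepositoryTaskScheduler gains runDataRefresh(), the configuration carries a data-refresh cron expression, and ActiveManagedRepositories reports the last refresh time. A minimal sketch of how another component might drive the renamed task, assuming only the calls already made by RunRepositoryTaskAction and ConfigureAction above; the sketch's own class and method names are illustrative:

    import java.util.Date;

    import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
    import org.apache.maven.archiva.scheduler.RepositoryTaskScheduler;
    import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;

    /**
     * Illustrative driver for the renamed data-refresh task (not part of this commit).
     */
    public class DataRefreshSketch
    {
        /**
         * @plexus.requirement
         */
        private RepositoryTaskScheduler taskScheduler;

        /**
         * @plexus.requirement
         */
        private ActiveManagedRepositories activeRepositories;

        public String refreshAndReport()
            throws TaskExecutionException
        {
            // Queue a data refresh, as RunRepositoryTaskAction.runRefresh() does.
            taskScheduler.runDataRefresh();

            long lastRefresh = activeRepositories.getLastDataRefreshTime();

            // ConfigureAction treats 0 as "never refreshed"; mirror that here.
            return ( lastRefresh != 0 ) ? new Date( lastRefresh ).toString() : "never";
        }
    }
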