--- /dev/null
+package org.apache.maven.archiva.applet;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import javax.swing.*;
+import java.applet.Applet;
+import java.awt.*;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.AccessController;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivilegedAction;
+
+/**
+ * Applet that takes a file on the local filesystem and checksums it for sending to the server.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class ChecksumApplet
+ extends Applet
+{
+ private static final int CHECKSUM_BUFFER_SIZE = 8192;
+
+ private static final int BYTE_MASK = 0xFF;
+
+ private JProgressBar progressBar;
+
+ public void init()
+ {
+ setLayout( new BorderLayout() );
+ progressBar = new JProgressBar();
+ progressBar.setStringPainted( true );
+ add( progressBar, BorderLayout.CENTER );
+ JLabel label = new JLabel( "Checksum progress: " );
+ add( label, BorderLayout.WEST );
+ }
+
+ public String generateMd5( final String file )
+ throws IOException, NoSuchAlgorithmException
+ {
+ Object o = AccessController.doPrivileged( new PrivilegedAction()
+ {
+ public Object run()
+ {
+ try
+ {
+ return checksumFile( file );
+ }
+ catch ( NoSuchAlgorithmException e )
+ {
+ return "Error checksumming file: " + e.getMessage();
+ }
+ catch ( FileNotFoundException e )
+ {
+ return "Couldn't find the file. " + e.getMessage();
+ }
+ catch ( IOException e )
+ {
+ return "Error reading file: " + e.getMessage();
+ }
+ }
+ } );
+ return (String) o;
+ }
+
+ protected String checksumFile( String file )
+ throws NoSuchAlgorithmException, IOException
+ {
+ MessageDigest digest = MessageDigest.getInstance( "MD5" );
+
+ long total = new File( file ).length();
+ InputStream fis = new FileInputStream( file );
+ try
+ {
+ long totalRead = 0;
+ byte[] buffer = new byte[CHECKSUM_BUFFER_SIZE];
+ int numRead;
+ do
+ {
+ numRead = fis.read( buffer );
+ if ( numRead > 0 )
+ {
+ digest.update( buffer, 0, numRead );
+ totalRead += numRead;
+ progressBar.setValue( (int) ( totalRead * progressBar.getMaximum() / total ) );
+ }
+ }
+ while ( numRead != -1 );
+ }
+ finally
+ {
+ fis.close();
+ }
+
+ return byteArrayToHexStr( digest.digest() );
+ }
+
+ protected static String byteArrayToHexStr( byte[] data )
+ {
+ String output = "";
+
+ for ( int cnt = 0; cnt < data.length; cnt++ )
+ {
+                //Mask the byte into the low-order 8 bits of an int (unsigned value).
+ int tempInt = data[cnt] & BYTE_MASK;
+
+ //Get hex representation of the int as a string.
+ String tempStr = Integer.toHexString( tempInt );
+
+ //Append a leading 0 if necessary so that each hex string will contain 2 characters.
+ if ( tempStr.length() == 1 )
+ {
+ tempStr = "0" + tempStr;
+ }
+
+ //Concatenate the two characters to the output string.
+ output = output + tempStr;
+ }
+
+ return output.toUpperCase();
+ }
+}
\ No newline at end of file
+++ /dev/null
-package org.apache.maven.repository.applet;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import javax.swing.*;
-import java.applet.Applet;
-import java.awt.*;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.AccessController;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.security.PrivilegedAction;
-
-/**
- * Applet that takes a file on the local filesystem and checksums it for sending to the server.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ChecksumApplet
- extends Applet
-{
- private static final int CHECKSUM_BUFFER_SIZE = 8192;
-
- private static final int BYTE_MASK = 0xFF;
-
- private JProgressBar progressBar;
-
- public void init()
- {
- setLayout( new BorderLayout() );
- progressBar = new JProgressBar();
- progressBar.setStringPainted( true );
- add( progressBar, BorderLayout.CENTER );
- JLabel label = new JLabel( "Checksum progress: " );
- add( label, BorderLayout.WEST );
- }
-
- public String generateMd5( final String file )
- throws IOException, NoSuchAlgorithmException
- {
- Object o = AccessController.doPrivileged( new PrivilegedAction()
- {
- public Object run()
- {
- try
- {
- return checksumFile( file );
- }
- catch ( NoSuchAlgorithmException e )
- {
- return "Error checksumming file: " + e.getMessage();
- }
- catch ( FileNotFoundException e )
- {
- return "Couldn't find the file. " + e.getMessage();
- }
- catch ( IOException e )
- {
- return "Error reading file: " + e.getMessage();
- }
- }
- } );
- return (String) o;
- }
-
- protected String checksumFile( String file )
- throws NoSuchAlgorithmException, IOException
- {
- MessageDigest digest = MessageDigest.getInstance( "MD5" );
-
- long total = new File( file ).length();
- InputStream fis = new FileInputStream( file );
- try
- {
- long totalRead = 0;
- byte[] buffer = new byte[CHECKSUM_BUFFER_SIZE];
- int numRead;
- do
- {
- numRead = fis.read( buffer );
- if ( numRead > 0 )
- {
- digest.update( buffer, 0, numRead );
- totalRead += numRead;
- progressBar.setValue( (int) ( totalRead * progressBar.getMaximum() / total ) );
- }
- }
- while ( numRead != -1 );
- }
- finally
- {
- fis.close();
- }
-
- return byteArrayToHexStr( digest.digest() );
- }
-
- protected static String byteArrayToHexStr( byte[] data )
- {
- String output = "";
-
- for ( int cnt = 0; cnt < data.length; cnt++ )
- {
- //Deposit a byte into the 8 lsb of an int.
- int tempInt = data[cnt] & BYTE_MASK;
-
- //Get hex representation of the int as a string.
- String tempStr = Integer.toHexString( tempInt );
-
- //Append a leading 0 if necessary so that each hex string will contain 2 characters.
- if ( tempStr.length() == 1 )
- {
- tempStr = "0" + tempStr;
- }
-
- //Concatenate the two characters to the output string.
- output = output + tempStr;
- }
-
- return output.toUpperCase();
- }
-}
\ No newline at end of file
<instrumentation>
<!-- exclude generated -->
<excludes>
- <exclude>org/apache/maven/repository/configuration/io/**</exclude>
- <exclude>org/apache/maven/repository/configuration/*RepositoryConfiguration.*</exclude>
- <exclude>org/apache/maven/repository/configuration/Configuration.*</exclude>
- <exclude>org/apache/maven/repository/configuration/Proxy.*</exclude>
+ <exclude>org/apache/maven/archiva/configuration/io/**</exclude>
+ <exclude>org/apache/maven/archiva/configuration/*RepositoryConfiguration.*</exclude>
+ <exclude>org/apache/maven/archiva/configuration/Configuration.*</exclude>
+ <exclude>org/apache/maven/archiva/configuration/Proxy.*</exclude>
</excludes>
</instrumentation>
</configuration>
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * An error changing the configuration
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class ConfigurationChangeException
+ extends Exception
+{
+ public ConfigurationChangeException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Component capable of noticing configuration changes and adjusting accordingly.
+ * This is not a Plexus role.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface ConfigurationChangeListener
+{
+ /**
+ * Notify the object that there has been a configuration change.
+ *
+ * @param configuration the new configuration
+ * @throws InvalidConfigurationException if there is a problem with the new configuration
+ * @throws ConfigurationChangeException if there is a problem changing the configuration, but the configuration is valid
+ */
+ void notifyOfConfigurationChange( Configuration configuration )
+ throws InvalidConfigurationException, ConfigurationChangeException;
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A component for loading the configuration into the model.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo this is something that could possibly be generalised into Modello.
+ */
+public interface ConfigurationStore
+{
+ /**
+ * The Plexus role for the component.
+ */
+ String ROLE = ConfigurationStore.class.getName();
+
+ /**
+ * Get the configuration from the store. A cached version may be used.
+ *
+ * @return the configuration
+ * @throws ConfigurationStoreException if there is a problem loading the configuration
+ */
+ Configuration getConfigurationFromStore()
+ throws ConfigurationStoreException;
+
+ /**
+ * Save the configuration to the store.
+ *
+ * @param configuration the configuration to store
+ */
+ void storeConfiguration( Configuration configuration )
+ throws ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException;
+
+ /**
+ * Add a configuration change listener.
+ *
+ * @param listener the listener
+ */
+ void addChangeListener( ConfigurationChangeListener listener );
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception occurring using the configuration store.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class ConfigurationStoreException
+ extends Exception
+{
+ public ConfigurationStoreException( String message )
+ {
+ super( message );
+ }
+
+ public ConfigurationStoreException( String message, Throwable e )
+ {
+ super( message, e );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.io.xpp3.ConfigurationXpp3Reader;
+import org.apache.maven.archiva.configuration.io.xpp3.ConfigurationXpp3Writer;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Load and store the configuration. No synchronization is used, as it is unnecessary: callers can safely
+ * continue to use the old configuration object after a new one is loaded.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
+ * @todo would be good to monitor the store file for changes
+ * @todo support other implementations than XML file
+ * @plexus.component
+ */
+public class DefaultConfigurationStore
+ extends AbstractLogEnabled
+ implements ConfigurationStore
+{
+ /**
+ * @plexus.configuration default-value="${configuration.store.file}"
+ */
+ private File file;
+
+ /**
+ * The cached configuration.
+ */
+ private Configuration configuration;
+
+ /**
+ * List of listeners to configuration changes.
+ */
+ private List/*<ConfigurationChangeListener>*/ listeners = new LinkedList();
+
+ public Configuration getConfigurationFromStore()
+ throws ConfigurationStoreException
+ {
+ if ( configuration == null )
+ {
+ ConfigurationXpp3Reader reader = new ConfigurationXpp3Reader();
+
+ if ( file == null )
+ {
+ file = new File( System.getProperty( "user.home" ), "/.m2/archiva-manager.xml" );
+ }
+
+ FileReader fileReader;
+ try
+ {
+ fileReader = new FileReader( file );
+ }
+ catch ( FileNotFoundException e )
+ {
+ getLogger().warn( "Configuration file: " + file + " not found. Using defaults." );
+ configuration = new Configuration();
+ return configuration;
+ }
+
+ getLogger().info( "Reading configuration from " + file );
+ try
+ {
+ configuration = reader.read( fileReader, false );
+ }
+ catch ( IOException e )
+ {
+ throw new ConfigurationStoreException( e.getMessage(), e );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new ConfigurationStoreException( e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( fileReader );
+ }
+ }
+ return configuration;
+ }
+
+ public void storeConfiguration( Configuration configuration )
+ throws ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException
+ {
+ for ( Iterator i = listeners.iterator(); i.hasNext(); )
+ {
+ ConfigurationChangeListener listener = (ConfigurationChangeListener) i.next();
+
+ listener.notifyOfConfigurationChange( configuration );
+ }
+
+ ConfigurationXpp3Writer writer = new ConfigurationXpp3Writer();
+
+ getLogger().info( "Writing configuration to " + file );
+ FileWriter fileWriter = null;
+ try
+ {
+ file.getParentFile().mkdirs();
+
+ fileWriter = new FileWriter( file );
+ writer.write( fileWriter, configuration );
+ }
+ catch ( IOException e )
+ {
+ throw new ConfigurationStoreException( e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( fileWriter );
+ }
+ }
+
+ public void addChangeListener( ConfigurationChangeListener listener )
+ {
+ listeners.add( listener );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * An error in the configuration.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class InvalidConfigurationException
+ extends Exception
+{
+ private final String name;
+
+ public InvalidConfigurationException( String name, String message )
+ {
+ super( message );
+ this.name = name;
+ }
+
+ public InvalidConfigurationException( String name, String message, Throwable cause )
+ {
+ super( message, cause );
+
+ this.name = name;
+ }
+
+ public String getName()
+ {
+ return name;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.util.StringUtils;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Enumeration;
+import java.util.Properties;
+import java.util.StringTokenizer;
+
+/**
+ * @author Ben Walding
+ * @author Brett Porter
+ */
+public class MavenProxyPropertyLoader
+{
+ private static final String REPO_LOCAL_STORE = "repo.local.store";
+
+ private static final String PROXY_LIST = "proxy.list";
+
+ private static final String REPO_LIST = "repo.list";
+
+ public void load( Properties props, Configuration configuration )
+ throws InvalidConfigurationException
+ {
+ // set up the managed repository
+ String localCachePath = getMandatoryProperty( props, REPO_LOCAL_STORE );
+
+ RepositoryConfiguration config = new RepositoryConfiguration();
+ config.setDirectory( localCachePath );
+ config.setName( "Imported Maven-Proxy Cache" );
+ config.setId( "maven-proxy" );
+ configuration.addRepository( config );
+
+ //just get the first HTTP proxy and break
+ String propertyList = props.getProperty( PROXY_LIST );
+ if ( propertyList != null )
+ {
+ StringTokenizer tok = new StringTokenizer( propertyList, "," );
+ while ( tok.hasMoreTokens() )
+ {
+ String key = tok.nextToken();
+ if ( StringUtils.isNotEmpty( key ) )
+ {
+ Proxy proxy = new Proxy();
+ proxy.setHost( getMandatoryProperty( props, "proxy." + key + ".host" ) );
+ proxy.setPort( Integer.parseInt( getMandatoryProperty( props, "proxy." + key + ".port" ) ) );
+
+ // the username and password isn't required
+ proxy.setUsername( props.getProperty( "proxy." + key + ".username" ) );
+ proxy.setPassword( props.getProperty( "proxy." + key + ".password" ) );
+
+ configuration.setProxy( proxy );
+
+ //accept only one proxy configuration
+ break;
+ }
+ }
+ }
+
+ //get the remote repository list
+ String repoList = getMandatoryProperty( props, REPO_LIST );
+
+ StringTokenizer tok = new StringTokenizer( repoList, "," );
+ while ( tok.hasMoreTokens() )
+ {
+ String key = tok.nextToken();
+
+ Properties repoProps = getSubset( props, "repo." + key + "." );
+ String url = getMandatoryProperty( props, "repo." + key + ".url" );
+ String proxyKey = repoProps.getProperty( "proxy" );
+
+ boolean cacheFailures =
+ Boolean.valueOf( repoProps.getProperty( "cache.failures", "false" ) ).booleanValue();
+ boolean hardFail = Boolean.valueOf( repoProps.getProperty( "hardfail", "true" ) ).booleanValue();
+ int cachePeriod = Integer.parseInt( repoProps.getProperty( "cache.period", "60" ) );
+
+ ProxiedRepositoryConfiguration repository = new ProxiedRepositoryConfiguration();
+ repository.setId( key );
+ repository.setLayout( "legacy" );
+ repository.setManagedRepository( config.getId() );
+ repository.setName( "Imported Maven-Proxy Remote Proxy" );
+ repository.setSnapshotsInterval( cachePeriod );
+ repository.setUrl( url );
+ repository.setUseNetworkProxy( StringUtils.isNotEmpty( proxyKey ) );
+ repository.setCacheFailures( cacheFailures );
+ repository.setHardFail( hardFail );
+
+ configuration.addProxiedRepository( repository );
+ }
+ }
+
+ private Properties getSubset( Properties props, String prefix )
+ {
+ Enumeration keys = props.keys();
+ Properties result = new Properties();
+ while ( keys.hasMoreElements() )
+ {
+ String key = (String) keys.nextElement();
+ String value = props.getProperty( key );
+ if ( key.startsWith( prefix ) )
+ {
+ String newKey = key.substring( prefix.length() );
+ result.setProperty( newKey, value );
+ }
+ }
+ return result;
+ }
+
+ public void load( InputStream is, Configuration configuration )
+ throws IOException, InvalidConfigurationException
+ {
+ Properties props = new Properties();
+ props.load( is );
+ load( props, configuration );
+ }
+
+ private String getMandatoryProperty( Properties props, String key )
+ throws InvalidConfigurationException
+ {
+ String value = props.getProperty( key );
+
+ if ( value == null )
+ {
+ throw new InvalidConfigurationException( key, "Missing required field: " + key );
+ }
+
+ return value;
+ }
+}
\ No newline at end of file
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * An error changing the configuration
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ConfigurationChangeException
- extends Exception
-{
- public ConfigurationChangeException( String message, Throwable cause )
- {
- super( message, cause );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Component capable of noticing configuration changes and adjusting accordingly.
- * This is not a Plexus role.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface ConfigurationChangeListener
-{
- /**
- * Notify the object that there has been a configuration change.
- *
- * @param configuration the new configuration
- * @throws InvalidConfigurationException if there is a problem with the new configuration
- * @throws ConfigurationChangeException if there is a problem changing the configuration, but the configuration is valid
- */
- void notifyOfConfigurationChange( Configuration configuration )
- throws InvalidConfigurationException, ConfigurationChangeException;
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * A component for loading the configuration into the model.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo this is something that could possibly be generalised into Modello.
- */
-public interface ConfigurationStore
-{
- /**
- * The Plexus role for the component.
- */
- String ROLE = ConfigurationStore.class.getName();
-
- /**
- * Get the configuration from the store. A cached version may be used.
- *
- * @return the configuration
- * @throws ConfigurationStoreException if there is a problem loading the configuration
- */
- Configuration getConfigurationFromStore()
- throws ConfigurationStoreException;
-
- /**
- * Save the configuration to the store.
- *
- * @param configuration the configuration to store
- */
- void storeConfiguration( Configuration configuration )
- throws ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException;
-
- /**
- * Add a configuration change listener.
- *
- * @param listener the listener
- */
- void addChangeListener( ConfigurationChangeListener listener );
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Exception occurring using the configuration store.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ConfigurationStoreException
- extends Exception
-{
- public ConfigurationStoreException( String message )
- {
- super( message );
- }
-
- public ConfigurationStoreException( String message, Throwable e )
- {
- super( message, e );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.io.xpp3.ConfigurationXpp3Reader;
-import org.apache.maven.repository.configuration.io.xpp3.ConfigurationXpp3Writer;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.IOUtil;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Load and store the configuration. No synchronization is used, but it is unnecessary as the old configuration object
- * can continue to be used.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be great for plexus to do this for us - so the configuration would be a component itself rather than this store
- * @todo would be good to monitor the store file for changes
- * @todo support other implementations than XML file
- * @plexus.component
- */
-public class DefaultConfigurationStore
- extends AbstractLogEnabled
- implements ConfigurationStore
-{
- /**
- * @plexus.configuration default-value="${configuration.store.file}"
- */
- private File file;
-
- /**
- * The cached configuration.
- */
- private Configuration configuration;
-
- /**
- * List of listeners to configuration changes.
- */
- private List/*<ConfigurationChangeListener>*/ listeners = new LinkedList();
-
- public Configuration getConfigurationFromStore()
- throws ConfigurationStoreException
- {
- if ( configuration == null )
- {
- ConfigurationXpp3Reader reader = new ConfigurationXpp3Reader();
-
- if ( file == null )
- {
- file = new File( System.getProperty( "user.home" ), "/.m2/repository-manager.xml" );
- }
-
- FileReader fileReader;
- try
- {
- fileReader = new FileReader( file );
- }
- catch ( FileNotFoundException e )
- {
- getLogger().warn( "Configuration file: " + file + " not found. Using defaults." );
- configuration = new Configuration();
- return configuration;
- }
-
- getLogger().info( "Reading configuration from " + file );
- try
- {
- configuration = reader.read( fileReader, false );
- }
- catch ( IOException e )
- {
- throw new ConfigurationStoreException( e.getMessage(), e );
- }
- catch ( XmlPullParserException e )
- {
- throw new ConfigurationStoreException( e.getMessage(), e );
- }
- finally
- {
- IOUtil.close( fileReader );
- }
- }
- return configuration;
- }
-
- public void storeConfiguration( Configuration configuration )
- throws ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException
- {
- for ( Iterator i = listeners.iterator(); i.hasNext(); )
- {
- ConfigurationChangeListener listener = (ConfigurationChangeListener) i.next();
-
- listener.notifyOfConfigurationChange( configuration );
- }
-
- ConfigurationXpp3Writer writer = new ConfigurationXpp3Writer();
-
- getLogger().info( "Writing configuration to " + file );
- FileWriter fileWriter = null;
- try
- {
- file.getParentFile().mkdirs();
-
- fileWriter = new FileWriter( file );
- writer.write( fileWriter, configuration );
- }
- catch ( IOException e )
- {
- throw new ConfigurationStoreException( e.getMessage(), e );
- }
- finally
- {
- IOUtil.close( fileWriter );
- }
- }
-
- public void addChangeListener( ConfigurationChangeListener listener )
- {
- listeners.add( listener );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * An error in the configuration.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class InvalidConfigurationException
- extends Exception
-{
- private final String name;
-
- public InvalidConfigurationException( String name, String message )
- {
- super( message );
- this.name = name;
- }
-
- public InvalidConfigurationException( String name, String message, Throwable cause )
- {
- super( message, cause );
-
- this.name = name;
- }
-
- public String getName()
- {
- return name;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.util.StringUtils;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Enumeration;
-import java.util.Properties;
-import java.util.StringTokenizer;
-
-/**
- * @author Ben Walding
- * @author Brett Porter
- */
-public class MavenProxyPropertyLoader
-{
- private static final String REPO_LOCAL_STORE = "repo.local.store";
-
- private static final String PROXY_LIST = "proxy.list";
-
- private static final String REPO_LIST = "repo.list";
-
- public void load( Properties props, Configuration configuration )
- throws InvalidConfigurationException
- {
- // set up the managed repository
- String localCachePath = getMandatoryProperty( props, REPO_LOCAL_STORE );
-
- RepositoryConfiguration config = new RepositoryConfiguration();
- config.setDirectory( localCachePath );
- config.setName( "Imported Maven-Proxy Cache" );
- config.setId( "maven-proxy" );
- configuration.addRepository( config );
-
- //just get the first HTTP proxy and break
- String propertyList = props.getProperty( PROXY_LIST );
- if ( propertyList != null )
- {
- StringTokenizer tok = new StringTokenizer( propertyList, "," );
- while ( tok.hasMoreTokens() )
- {
- String key = tok.nextToken();
- if ( StringUtils.isNotEmpty( key ) )
- {
- Proxy proxy = new Proxy();
- proxy.setHost( getMandatoryProperty( props, "proxy." + key + ".host" ) );
- proxy.setPort( Integer.parseInt( getMandatoryProperty( props, "proxy." + key + ".port" ) ) );
-
- // the username and password isn't required
- proxy.setUsername( props.getProperty( "proxy." + key + ".username" ) );
- proxy.setPassword( props.getProperty( "proxy." + key + ".password" ) );
-
- configuration.setProxy( proxy );
-
- //accept only one proxy configuration
- break;
- }
- }
- }
-
- //get the remote repository list
- String repoList = getMandatoryProperty( props, REPO_LIST );
-
- StringTokenizer tok = new StringTokenizer( repoList, "," );
- while ( tok.hasMoreTokens() )
- {
- String key = tok.nextToken();
-
- Properties repoProps = getSubset( props, "repo." + key + "." );
- String url = getMandatoryProperty( props, "repo." + key + ".url" );
- String proxyKey = repoProps.getProperty( "proxy" );
-
- boolean cacheFailures =
- Boolean.valueOf( repoProps.getProperty( "cache.failures", "false" ) ).booleanValue();
- boolean hardFail = Boolean.valueOf( repoProps.getProperty( "hardfail", "true" ) ).booleanValue();
- int cachePeriod = Integer.parseInt( repoProps.getProperty( "cache.period", "60" ) );
-
- ProxiedRepositoryConfiguration repository = new ProxiedRepositoryConfiguration();
- repository.setId( key );
- repository.setLayout( "legacy" );
- repository.setManagedRepository( config.getId() );
- repository.setName( "Imported Maven-Proxy Remote Proxy" );
- repository.setSnapshotsInterval( cachePeriod );
- repository.setUrl( url );
- repository.setUseNetworkProxy( StringUtils.isNotEmpty( proxyKey ) );
- repository.setCacheFailures( cacheFailures );
- repository.setHardFail( hardFail );
-
- configuration.addProxiedRepository( repository );
- }
- }
-
- private Properties getSubset( Properties props, String prefix )
- {
- Enumeration keys = props.keys();
- Properties result = new Properties();
- while ( keys.hasMoreElements() )
- {
- String key = (String) keys.nextElement();
- String value = props.getProperty( key );
- if ( key.startsWith( prefix ) )
- {
- String newKey = key.substring( prefix.length() );
- result.setProperty( newKey, value );
- }
- }
- return result;
- }
-
- public void load( InputStream is, Configuration configuration )
- throws IOException, InvalidConfigurationException
- {
- Properties props = new Properties();
- props.load( is );
- load( props, configuration );
- }
-
- private String getMandatoryProperty( Properties props, String key )
- throws InvalidConfigurationException
- {
- String value = props.getProperty( key );
-
- if ( value == null )
- {
- throw new InvalidConfigurationException( key, "Missing required field: " + key );
- }
-
- return value;
- }
-}
\ No newline at end of file
<defaults>
<default>
<key>package</key>
- <value>org.apache.maven.repository.configuration</value>
+ <value>org.apache.maven.archiva.configuration</value>
</default>
</defaults>
<!-- TODO! break out subtypes such as <discovery> and create a list of blacklist -->
--- /dev/null
+#
+# Copyright 2005-2006 The Apache Software Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+Key_properties=java.lang.String
+Element_properties=java.lang.String
+++ /dev/null
-#
-# Copyright 2005-2006 The Apache Software Foundation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-Key_properties=java.lang.String
-Element_properties=java.lang.String
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+import org.codehaus.plexus.PlexusTestCase;
+import org.easymock.MockControl;
+
+import java.io.File;
+import java.util.Properties;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Test the configuration store.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @noinspection JavaDoc
+ */
+public class ConfigurationStoreTest
+ extends PlexusTestCase
+{
+ public void testInvalidFile()
+ throws Exception
+ {
+ ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "invalid-file" );
+
+ Configuration configuration = configurationStore.getConfigurationFromStore();
+
+ // check default configuration
+ assertNotNull( "check configuration returned", configuration );
+ assertEquals( "check configuration has default elements", "0 0 * * * ?",
+ configuration.getIndexerCronExpression() );
+ assertNull( "check configuration has default elements", configuration.getIndexPath() );
+ assertTrue( "check configuration has default elements", configuration.getRepositories().isEmpty() );
+ }
+
+ public void testCorruptFile()
+ throws Exception
+ {
+ ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "corrupt-file" );
+
+ try
+ {
+ configurationStore.getConfigurationFromStore();
+ fail( "Configuration should not have succeeded" );
+ }
+ catch ( ConfigurationStoreException e )
+ {
+ // expected
+ assertTrue( true );
+ }
+ }
+
+ public void testGetConfiguration()
+ throws Exception
+ {
+ ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "default" );
+
+ Configuration configuration = configurationStore.getConfigurationFromStore();
+
+ assertEquals( "check indexPath", ".index", configuration.getIndexPath() );
+ assertEquals( "check localRepository", "local-repository", configuration.getLocalRepository() );
+
+ assertEquals( "check managed repositories", 1, configuration.getRepositories().size() );
+ RepositoryConfiguration repository =
+ (RepositoryConfiguration) configuration.getRepositories().iterator().next();
+
+ assertEquals( "check managed repositories", "managed-repository", repository.getDirectory() );
+ assertEquals( "check managed repositories", "local", repository.getName() );
+ assertEquals( "check managed repositories", "local", repository.getId() );
+ assertEquals( "check managed repositories", "default", repository.getLayout() );
+ assertTrue( "check managed repositories", repository.isIndexed() );
+
+ assertEquals( "check proxied repositories", 1, configuration.getProxiedRepositories().size() );
+ ProxiedRepositoryConfiguration proxiedRepository =
+ (ProxiedRepositoryConfiguration) configuration.getProxiedRepositories().iterator().next();
+
+ assertEquals( "check proxied repositories", "local", proxiedRepository.getManagedRepository() );
+ assertEquals( "check proxied repositories", "http://www.ibiblio.org/maven2/", proxiedRepository.getUrl() );
+ assertEquals( "check proxied repositories", "ibiblio", proxiedRepository.getId() );
+ assertEquals( "check proxied repositories", "Ibiblio", proxiedRepository.getName() );
+ assertEquals( "check proxied repositories", 0, proxiedRepository.getSnapshotsInterval() );
+ assertEquals( "check proxied repositories", 0, proxiedRepository.getReleasesInterval() );
+ assertTrue( "check proxied repositories", proxiedRepository.isUseNetworkProxy() );
+
+ assertEquals( "check synced repositories", 1, configuration.getSyncedRepositories().size() );
+ SyncedRepositoryConfiguration syncedRepository =
+ (SyncedRepositoryConfiguration) configuration.getSyncedRepositories().iterator().next();
+
+ assertEquals( "check synced repositories", "local", syncedRepository.getManagedRepository() );
+ assertEquals( "check synced repositories", "apache", syncedRepository.getId() );
+ assertEquals( "check synced repositories", "ASF", syncedRepository.getName() );
+ assertEquals( "check synced repositories", "0 0 * * * ?", syncedRepository.getCronExpression() );
+ assertEquals( "check synced repositories", "rsync", syncedRepository.getMethod() );
+ Properties properties = new Properties();
+ properties.setProperty( "rsyncHost", "host" );
+ properties.setProperty( "rsyncMethod", "ssh" );
+ assertEquals( "check synced repositories", properties, syncedRepository.getProperties() );
+ }
+
+ public void testStoreConfiguration()
+ throws Exception
+ {
+ ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "save-file" );
+
+ Configuration configuration = new Configuration();
+ configuration.setIndexPath( "index-path" );
+
+ File file = getTestFile( "target/test/test-file.xml" );
+ file.delete();
+ assertFalse( file.exists() );
+
+ configurationStore.storeConfiguration( configuration );
+
+ assertTrue( "Check file exists", file.exists() );
+
+ // read it back
+ configuration = configurationStore.getConfigurationFromStore();
+ assertEquals( "check value", "index-path", configuration.getIndexPath() );
+ }
+
+ /**
+ * @noinspection JUnitTestMethodWithNoAssertions
+ */
+ public void testChangeListeners()
+ throws Exception
+ {
+ ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "save-file" );
+
+ MockControl control = MockControl.createControl( ConfigurationChangeListener.class );
+ ConfigurationChangeListener mock = (ConfigurationChangeListener) control.getMock();
+ configurationStore.addChangeListener( mock );
+
+ Configuration configuration = new Configuration();
+ mock.notifyOfConfigurationChange( configuration );
+ control.replay();
+
+ configurationStore.storeConfiguration( configuration );
+
+ control.verify();
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.Properties;
+
+/**
+ * @author Edwin Punzalan
+ */
+public class MavenProxyPropertyLoaderTest
+ extends PlexusTestCase
+{
+ private static final int DEFAULT_CACHE_PERIOD = 3600;
+
+ private MavenProxyPropertyLoader loader;
+
+ public void testLoadValidMavenProxyConfiguration()
+ throws IOException, InvalidConfigurationException
+ {
+ File confFile = getTestFile( "src/test/conf/maven-proxy-complete.conf" );
+
+ Configuration configuration = new Configuration();
+ Proxy proxy = new Proxy();
+ proxy.setHost( "original-host" );
+ configuration.setProxy( proxy ); // overwritten
+ configuration.setIndexPath( "index-path" ); // existing value
+
+ loader.load( new FileInputStream( confFile ), configuration );
+
+ List list = configuration.getRepositories();
+ assertEquals( "check single managed repository", 1, list.size() );
+ RepositoryConfiguration managedRepository = (RepositoryConfiguration) list.iterator().next();
+ assertEquals( "cache path changed", "target", managedRepository.getDirectory() );
+
+ assertEquals( "Count repositories", 4, configuration.getProxiedRepositories().size() );
+
+ list = configuration.getProxiedRepositories();
+ ProxiedRepositoryConfiguration repo = (ProxiedRepositoryConfiguration) list.get( 0 );
+ assertEquals( "Repository name not as expected", "local-repo", repo.getId() );
+ assertEquals( "Repository url does not match its name", "file://target", repo.getUrl() );
+ assertEquals( "Repository cache period check failed", 0, repo.getSnapshotsInterval() );
+ assertFalse( "Repository failure caching check failed", repo.isCacheFailures() );
+
+ repo = (ProxiedRepositoryConfiguration) list.get( 1 );
+ assertEquals( "Repository name not as expected", "www-ibiblio-org", repo.getId() );
+ assertEquals( "Repository url does not match its name", "http://www.ibiblio.org/maven2", repo.getUrl() );
+ assertEquals( "Repository cache period check failed", DEFAULT_CACHE_PERIOD, repo.getSnapshotsInterval() );
+ assertTrue( "Repository failure caching check failed", repo.isCacheFailures() );
+
+ repo = (ProxiedRepositoryConfiguration) list.get( 2 );
+ assertEquals( "Repository name not as expected", "dist-codehaus-org", repo.getId() );
+ assertEquals( "Repository url does not match its name", "http://dist.codehaus.org", repo.getUrl() );
+ assertEquals( "Repository cache period check failed", DEFAULT_CACHE_PERIOD, repo.getSnapshotsInterval() );
+ assertTrue( "Repository failure caching check failed", repo.isCacheFailures() );
+
+ repo = (ProxiedRepositoryConfiguration) list.get( 3 );
+ assertEquals( "Repository name not as expected", "private-example-com", repo.getId() );
+ assertEquals( "Repository url does not match its name", "http://private.example.com/internal", repo.getUrl() );
+ assertEquals( "Repository cache period check failed", DEFAULT_CACHE_PERIOD, repo.getSnapshotsInterval() );
+ assertFalse( "Repository failure caching check failed", repo.isCacheFailures() );
+ }
+
+ public void testInvalidConfiguration()
+ {
+ Configuration configuration = new Configuration();
+ try
+ {
+ loader.load( new Properties(), configuration );
+ fail( "Incomplete config should have failed" );
+ }
+ catch ( InvalidConfigurationException e )
+ {
+ assertTrue( true );
+ }
+ }
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+ loader = new MavenProxyPropertyLoader();
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-import org.codehaus.plexus.PlexusTestCase;
-import org.easymock.MockControl;
-
-import java.io.File;
-import java.util.Properties;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Test the configuration store.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @noinspection JavaDoc
- */
-public class ConfigurationStoreTest
- extends PlexusTestCase
-{
- public void testInvalidFile()
- throws Exception
- {
- ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "invalid-file" );
-
- Configuration configuration = configurationStore.getConfigurationFromStore();
-
- // check default configuration
- assertNotNull( "check configuration returned", configuration );
- assertEquals( "check configuration has default elements", "0 0 * * * ?",
- configuration.getIndexerCronExpression() );
- assertNull( "check configuration has default elements", configuration.getIndexPath() );
- assertTrue( "check configuration has default elements", configuration.getRepositories().isEmpty() );
- }
-
- public void testCorruptFile()
- throws Exception
- {
- ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "corrupt-file" );
-
- try
- {
- configurationStore.getConfigurationFromStore();
- fail( "Configuration should not have succeeded" );
- }
- catch ( ConfigurationStoreException e )
- {
- // expected
- assertTrue( true );
- }
- }
-
- public void testGetConfiguration()
- throws Exception
- {
- ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "default" );
-
- Configuration configuration = configurationStore.getConfigurationFromStore();
-
- assertEquals( "check indexPath", ".index", configuration.getIndexPath() );
- assertEquals( "check localRepository", "local-repository", configuration.getLocalRepository() );
-
- assertEquals( "check managed repositories", 1, configuration.getRepositories().size() );
- RepositoryConfiguration repository =
- (RepositoryConfiguration) configuration.getRepositories().iterator().next();
-
- assertEquals( "check managed repositories", "managed-repository", repository.getDirectory() );
- assertEquals( "check managed repositories", "local", repository.getName() );
- assertEquals( "check managed repositories", "local", repository.getId() );
- assertEquals( "check managed repositories", "default", repository.getLayout() );
- assertTrue( "check managed repositories", repository.isIndexed() );
-
- assertEquals( "check proxied repositories", 1, configuration.getProxiedRepositories().size() );
- ProxiedRepositoryConfiguration proxiedRepository =
- (ProxiedRepositoryConfiguration) configuration.getProxiedRepositories().iterator().next();
-
- assertEquals( "check proxied repositories", "local", proxiedRepository.getManagedRepository() );
- assertEquals( "check proxied repositories", "http://www.ibiblio.org/maven2/", proxiedRepository.getUrl() );
- assertEquals( "check proxied repositories", "ibiblio", proxiedRepository.getId() );
- assertEquals( "check proxied repositories", "Ibiblio", proxiedRepository.getName() );
- assertEquals( "check proxied repositories", 0, proxiedRepository.getSnapshotsInterval() );
- assertEquals( "check proxied repositories", 0, proxiedRepository.getReleasesInterval() );
- assertTrue( "check proxied repositories", proxiedRepository.isUseNetworkProxy() );
-
- assertEquals( "check synced repositories", 1, configuration.getSyncedRepositories().size() );
- SyncedRepositoryConfiguration syncedRepository =
- (SyncedRepositoryConfiguration) configuration.getSyncedRepositories().iterator().next();
-
- assertEquals( "check synced repositories", "local", syncedRepository.getManagedRepository() );
- assertEquals( "check synced repositories", "apache", syncedRepository.getId() );
- assertEquals( "check synced repositories", "ASF", syncedRepository.getName() );
- assertEquals( "check synced repositories", "0 0 * * * ?", syncedRepository.getCronExpression() );
- assertEquals( "check synced repositories", "rsync", syncedRepository.getMethod() );
- Properties properties = new Properties();
- properties.setProperty( "rsyncHost", "host" );
- properties.setProperty( "rsyncMethod", "ssh" );
- assertEquals( "check synced repositories", properties, syncedRepository.getProperties() );
- }
-
- public void testStoreConfiguration()
- throws Exception
- {
- ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "save-file" );
-
- Configuration configuration = new Configuration();
- configuration.setIndexPath( "index-path" );
-
- File file = getTestFile( "target/test/test-file.xml" );
- file.delete();
- assertFalse( file.exists() );
-
- configurationStore.storeConfiguration( configuration );
-
- assertTrue( "Check file exists", file.exists() );
-
- // read it back
- configuration = configurationStore.getConfigurationFromStore();
- assertEquals( "check value", "index-path", configuration.getIndexPath() );
- }
-
- /**
- * @noinspection JUnitTestMethodWithNoAssertions
- */
- public void testChangeListeners()
- throws Exception
- {
- ConfigurationStore configurationStore = (ConfigurationStore) lookup( ConfigurationStore.ROLE, "save-file" );
-
- MockControl control = MockControl.createControl( ConfigurationChangeListener.class );
- ConfigurationChangeListener mock = (ConfigurationChangeListener) control.getMock();
- configurationStore.addChangeListener( mock );
-
- Configuration configuration = new Configuration();
- mock.notifyOfConfigurationChange( configuration );
- control.replay();
-
- configurationStore.storeConfiguration( configuration );
-
- control.verify();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.List;
-import java.util.Properties;
-
-/**
- * @author Edwin Punzalan
- */
-public class MavenProxyPropertyLoaderTest
- extends PlexusTestCase
-{
- private static final int DEFAULT_CACHE_PERIOD = 3600;
-
- private MavenProxyPropertyLoader loader;
-
- public void testLoadValidMavenProxyConfiguration()
- throws IOException, InvalidConfigurationException
- {
- File confFile = getTestFile( "src/test/conf/maven-proxy-complete.conf" );
-
- Configuration configuration = new Configuration();
- Proxy proxy = new Proxy();
- proxy.setHost( "original-host" );
- configuration.setProxy( proxy ); // overwritten
- configuration.setIndexPath( "index-path" ); // existing value
-
- loader.load( new FileInputStream( confFile ), configuration );
-
- List list = configuration.getRepositories();
- assertEquals( "check single managed repository", 1, list.size() );
- RepositoryConfiguration managedRepository = (RepositoryConfiguration) list.iterator().next();
- assertEquals( "cache path changed", "target", managedRepository.getDirectory() );
-
- assertEquals( "Count repositories", 4, configuration.getProxiedRepositories().size() );
-
- list = configuration.getProxiedRepositories();
- ProxiedRepositoryConfiguration repo = (ProxiedRepositoryConfiguration) list.get( 0 );
- assertEquals( "Repository name not as expected", "local-repo", repo.getId() );
- assertEquals( "Repository url does not match its name", "file://target", repo.getUrl() );
- assertEquals( "Repository cache period check failed", 0, repo.getSnapshotsInterval() );
- assertFalse( "Repository failure caching check failed", repo.isCacheFailures() );
-
- repo = (ProxiedRepositoryConfiguration) list.get( 1 );
- assertEquals( "Repository name not as expected", "www-ibiblio-org", repo.getId() );
- assertEquals( "Repository url does not match its name", "http://www.ibiblio.org/maven2", repo.getUrl() );
- assertEquals( "Repository cache period check failed", DEFAULT_CACHE_PERIOD, repo.getSnapshotsInterval() );
- assertTrue( "Repository failure caching check failed", repo.isCacheFailures() );
-
- repo = (ProxiedRepositoryConfiguration) list.get( 2 );
- assertEquals( "Repository name not as expected", "dist-codehaus-org", repo.getId() );
- assertEquals( "Repository url does not match its name", "http://dist.codehaus.org", repo.getUrl() );
- assertEquals( "Repository cache period check failed", DEFAULT_CACHE_PERIOD, repo.getSnapshotsInterval() );
- assertTrue( "Repository failure caching check failed", repo.isCacheFailures() );
-
- repo = (ProxiedRepositoryConfiguration) list.get( 3 );
- assertEquals( "Repository name not as expected", "private-example-com", repo.getId() );
- assertEquals( "Repository url does not match its name", "http://private.example.com/internal", repo.getUrl() );
- assertEquals( "Repository cache period check failed", DEFAULT_CACHE_PERIOD, repo.getSnapshotsInterval() );
- assertFalse( "Repository failure caching check failed", repo.isCacheFailures() );
- }
-
- public void testInvalidConfiguration()
- {
- Configuration configuration = new Configuration();
- try
- {
- loader.load( new Properties(), configuration );
- fail( "Incomplete config should have failed" );
- }
- catch ( InvalidConfigurationException e )
- {
- assertTrue( true );
- }
- }
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
- loader = new MavenProxyPropertyLoader();
- }
-}
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.configuration.ConfigurationStore</role>
+ <role-hint>default</role-hint>
+ <implementation>org.apache.maven.archiva.configuration.DefaultConfigurationStore</implementation>
+ <configuration>
+ <file>${basedir}/src/test/conf/repository-manager.xml</file>
+ </configuration>
+ </component>
+ <component>
+ <role>org.apache.maven.archiva.configuration.ConfigurationStore</role>
+ <role-hint>corrupt-file</role-hint>
+ <implementation>org.apache.maven.archiva.configuration.DefaultConfigurationStore</implementation>
+ <configuration>
+ <file>${basedir}/src/test/conf/corrupt.xml</file>
+ </configuration>
+ </component>
+ <component>
+ <role>org.apache.maven.archiva.configuration.ConfigurationStore</role>
+ <role-hint>invalid-file</role-hint>
+ <implementation>org.apache.maven.archiva.configuration.DefaultConfigurationStore</implementation>
+ <configuration>
+ <file>${basedir}/src/test/conf/nada.txt</file>
+ </configuration>
+ </component>
+ <component>
+ <role>org.apache.maven.archiva.configuration.ConfigurationStore</role>
+ <role-hint>save-file</role-hint>
+ <implementation>org.apache.maven.archiva.configuration.DefaultConfigurationStore</implementation>
+ <configuration>
+ <file>${basedir}/target/test/test-file.xml</file>
+ </configuration>
+ </component>
+ </components>
+</component-set>
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<component-set>
- <components>
- <component>
- <role>org.apache.maven.repository.configuration.ConfigurationStore</role>
- <role-hint>default</role-hint>
- <implementation>org.apache.maven.repository.configuration.DefaultConfigurationStore</implementation>
- <configuration>
- <file>${basedir}/src/test/conf/repository-manager.xml</file>
- </configuration>
- </component>
- <component>
- <role>org.apache.maven.repository.configuration.ConfigurationStore</role>
- <role-hint>corrupt-file</role-hint>
- <implementation>org.apache.maven.repository.configuration.DefaultConfigurationStore</implementation>
- <configuration>
- <file>${basedir}/src/test/conf/corrupt.xml</file>
- </configuration>
- </component>
- <component>
- <role>org.apache.maven.repository.configuration.ConfigurationStore</role>
- <role-hint>invalid-file</role-hint>
- <implementation>org.apache.maven.repository.configuration.DefaultConfigurationStore</implementation>
- <configuration>
- <file>${basedir}/src/test/conf/nada.txt</file>
- </configuration>
- </component>
- <component>
- <role>org.apache.maven.repository.configuration.ConfigurationStore</role>
- <role-hint>save-file</role-hint>
- <implementation>org.apache.maven.repository.configuration.DefaultConfigurationStore</implementation>
- <configuration>
- <file>${basedir}/target/test/test-file.xml</file>
- </configuration>
- </component>
- </components>
-</component-set>
--- /dev/null
+package org.apache.maven.archiva.converter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.converter.transaction.FileTransaction;
+import org.apache.maven.archiva.digest.Digester;
+import org.apache.maven.archiva.digest.DigesterException;
+import org.apache.maven.archiva.reporting.ArtifactReporter;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
+import org.apache.maven.model.DistributionManagement;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.Relocation;
+import org.apache.maven.model.converter.ArtifactPomRewriter;
+import org.apache.maven.model.converter.ModelConverter;
+import org.apache.maven.model.converter.PomTranslationException;
+import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
+import org.apache.maven.model.v3_0_0.io.xpp3.MavenXpp3Reader;
+import org.codehaus.plexus.i18n.I18N;
+import org.codehaus.plexus.util.FileUtils;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Properties;
+import java.util.regex.Matcher;
+
+/**
+ * Implementation of repository conversion class.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component role="org.apache.maven.archiva.converter.RepositoryConverter" role-hint="default"
+ */
+public class DefaultRepositoryConverter
+ implements RepositoryConverter
+{
+ /**
+ * @plexus.requirement role-hint="sha1"
+ */
+ private Digester sha1Digester;
+
+ /**
+ * @plexus.requirement role-hint="md5"
+ */
+ private Digester md5Digester;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactFactory artifactFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactPomRewriter rewriter;
+
+ /**
+ * @plexus.requirement
+ */
+ private ModelConverter translator;
+
+ /**
+ * @plexus.configuration default-value="false"
+ */
+ private boolean force;
+
+ /**
+ * @plexus.configuration default-value="false"
+ */
+ private boolean dryrun;
+
+ /**
+ * @plexus.requirement
+ */
+ private I18N i18n;
+
+ public void convert( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter )
+ throws RepositoryConversionException
+ {
+ if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
+ {
+ throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) );
+ }
+
+ if ( validateMetadata( artifact, reporter ) )
+ {
+ FileTransaction transaction = new FileTransaction();
+
+ if ( copyPom( artifact, targetRepository, reporter, transaction ) )
+ {
+ if ( copyArtifact( artifact, targetRepository, reporter, transaction ) )
+ {
+ Metadata metadata = createBaseMetadata( artifact );
+ Versioning versioning = new Versioning();
+ versioning.addVersion( artifact.getBaseVersion() );
+ metadata.setVersioning( versioning );
+ updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata,
+ transaction );
+
+ metadata = createBaseMetadata( artifact );
+ metadata.setVersion( artifact.getBaseVersion() );
+ versioning = new Versioning();
+
+ Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( artifact.getVersion() );
+ if ( matcher.matches() )
+ {
+ Snapshot snapshot = new Snapshot();
+ snapshot.setBuildNumber( Integer.valueOf( matcher.group( 3 ) ).intValue() );
+ snapshot.setTimestamp( matcher.group( 2 ) );
+ versioning.setSnapshot( snapshot );
+ }
+
+ // TODO: merge latest/release/snapshot from source instead
+ metadata.setVersioning( versioning );
+ updateMetadata( new SnapshotArtifactRepositoryMetadata( artifact ), targetRepository, metadata,
+ transaction );
+
+ if ( !dryrun )
+ {
+ transaction.commit();
+ }
+ reporter.addSuccess( artifact );
+ }
+ }
+ }
+ }
+
+ private static Metadata createBaseMetadata( Artifact artifact )
+ {
+ Metadata metadata = new Metadata();
+ metadata.setArtifactId( artifact.getArtifactId() );
+ metadata.setGroupId( artifact.getGroupId() );
+ return metadata;
+ }
+
+ private void updateMetadata( RepositoryMetadata artifactMetadata, ArtifactRepository targetRepository,
+ Metadata newMetadata, FileTransaction transaction )
+ throws RepositoryConversionException
+ {
+ File file = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+
+ Metadata metadata;
+ boolean changed;
+
+ if ( file.exists() )
+ {
+ metadata = readMetadata( file );
+ changed = metadata.merge( newMetadata );
+ }
+ else
+ {
+ changed = true;
+ metadata = newMetadata;
+ }
+
+ if ( changed )
+ {
+ StringWriter writer = null;
+ try
+ {
+ writer = new StringWriter();
+
+ MetadataXpp3Writer mappingWriter = new MetadataXpp3Writer();
+
+ mappingWriter.write( writer, metadata );
+
+ transaction.createFile( writer.toString(), file );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryConversionException( "Error writing target metadata", e );
+ }
+ finally
+ {
+ IOUtil.close( writer );
+ }
+ }
+ }
+
+ private Metadata readMetadata( File file )
+ throws RepositoryConversionException
+ {
+ Metadata metadata;
+ MetadataXpp3Reader reader = new MetadataXpp3Reader();
+ FileReader fileReader = null;
+ try
+ {
+ fileReader = new FileReader( file );
+ metadata = reader.read( fileReader );
+ }
+ catch ( FileNotFoundException e )
+ {
+ throw new RepositoryConversionException( "Error reading target metadata", e );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryConversionException( "Error reading target metadata", e );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new RepositoryConversionException( "Error reading target metadata", e );
+ }
+ finally
+ {
+ IOUtil.close( fileReader );
+ }
+ return metadata;
+ }
+
+ private boolean validateMetadata( Artifact artifact, ArtifactReporter reporter )
+ throws RepositoryConversionException
+ {
+ ArtifactRepository repository = artifact.getRepository();
+
+ boolean result = true;
+
+ RepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact );
+ File file =
+ new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
+ if ( file.exists() )
+ {
+ Metadata metadata = readMetadata( file );
+ result = validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+ }
+
+ repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ file = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
+ if ( file.exists() )
+ {
+ Metadata metadata = readMetadata( file );
+ result = result && validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+ }
+
+ return result;
+ }
+
+ private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
+ ArtifactReporter reporter )
+ {
+ String groupIdKey;
+ String artifactIdKey = null;
+ String snapshotKey = null;
+ String versionKey = null;
+ String versionsKey = null;
+ if ( repositoryMetadata.storedInGroupDirectory() )
+ {
+ groupIdKey = "failure.incorrect.groupMetadata.groupId";
+ }
+ else if ( repositoryMetadata.storedInArtifactVersionDirectory() )
+ {
+ groupIdKey = "failure.incorrect.snapshotMetadata.groupId";
+ artifactIdKey = "failure.incorrect.snapshotMetadata.artifactId";
+ versionKey = "failure.incorrect.snapshotMetadata.version";
+ snapshotKey = "failure.incorrect.snapshotMetadata.snapshot";
+ }
+ else
+ {
+ groupIdKey = "failure.incorrect.artifactMetadata.groupId";
+ artifactIdKey = "failure.incorrect.artifactMetadata.artifactId";
+ versionsKey = "failure.incorrect.artifactMetadata.versions";
+ }
+
+ boolean result = true;
+
+ if ( !metadata.getGroupId().equals( artifact.getGroupId() ) )
+ {
+ reporter.addFailure( artifact, getI18NString( groupIdKey ) );
+ result = false;
+ }
+ if ( !repositoryMetadata.storedInGroupDirectory() )
+ {
+ if ( !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
+ {
+ reporter.addFailure( artifact, getI18NString( artifactIdKey ) );
+ result = false;
+ }
+ if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
+ {
+ // artifact metadata
+
+ boolean foundVersion = false;
+ if ( metadata.getVersioning() != null )
+ {
+ for ( Iterator i = metadata.getVersioning().getVersions().iterator();
+ i.hasNext() && !foundVersion; )
+ {
+ String version = (String) i.next();
+ if ( version.equals( artifact.getBaseVersion() ) )
+ {
+ foundVersion = true;
+ }
+ }
+ }
+
+ if ( !foundVersion )
+ {
+ reporter.addFailure( artifact, getI18NString( versionsKey ) );
+ result = false;
+ }
+ }
+ else
+ {
+ // snapshot metadata
+ if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
+ {
+ reporter.addFailure( artifact, getI18NString( versionKey ) );
+ result = false;
+ }
+
+ if ( artifact.isSnapshot() )
+ {
+ Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( artifact.getVersion() );
+ if ( matcher.matches() )
+ {
+ boolean correct = false;
+ if ( metadata.getVersioning() != null && metadata.getVersioning().getSnapshot() != null )
+ {
+ Snapshot snapshot = metadata.getVersioning().getSnapshot();
+ int build = Integer.valueOf( matcher.group( 3 ) ).intValue();
+ String ts = matcher.group( 2 );
+ if ( build == snapshot.getBuildNumber() && ts.equals( snapshot.getTimestamp() ) )
+ {
+ correct = true;
+ }
+ }
+
+ if ( !correct )
+ {
+ reporter.addFailure( artifact, getI18NString( snapshotKey ) );
+ result = false;
+ }
+ }
+ }
+ }
+ }
+ return result;
+ }
+
+ private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
+ FileTransaction transaction )
+ throws RepositoryConversionException
+ {
+ Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(),
+ artifact.getVersion() );
+ pom.setBaseVersion( artifact.getBaseVersion() );
+ ArtifactRepository repository = artifact.getRepository();
+ File file = new File( repository.getBasedir(), repository.pathOf( pom ) );
+
+ boolean result = true;
+ if ( file.exists() )
+ {
+ File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pom ) );
+
+ String contents = null;
+ boolean checksumsValid = false;
+ try
+ {
+ if ( testChecksums( artifact, file, reporter ) )
+ {
+ checksumsValid = true;
+ contents = FileUtils.fileRead( file );
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryConversionException( "Unable to read source POM: " + e.getMessage(), e );
+ }
+
+ if ( checksumsValid && contents.indexOf( "modelVersion" ) >= 0 )
+ {
+ // v4 POM
+ try
+ {
+ boolean matching = false;
+ if ( !force && targetFile.exists() )
+ {
+ String targetContents = FileUtils.fileRead( targetFile );
+ matching = targetContents.equals( contents );
+ }
+ if ( force || !matching )
+ {
+ transaction.createFile( contents, targetFile );
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryConversionException( "Unable to write target POM: " + e.getMessage(), e );
+ }
+ }
+ else
+ {
+ // v3 POM
+ StringReader stringReader = new StringReader( contents );
+ StringWriter writer = null;
+ try
+ {
+ MavenXpp3Reader v3Reader = new MavenXpp3Reader();
+ org.apache.maven.model.v3_0_0.Model v3Model = v3Reader.read( stringReader );
+
+ if ( doRelocation( artifact, v3Model, targetRepository, transaction ) )
+ {
+ Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(),
+ artifact.getArtifactId(),
+ artifact.getVersion() );
+ targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( relocatedPom ) );
+ }
+
+ Model v4Model = translator.translate( v3Model );
+
+ translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(),
+ v3Model.getVersion(), v3Model.getPackage() );
+
+ writer = new StringWriter();
+ MavenXpp3Writer Xpp3Writer = new MavenXpp3Writer();
+ Xpp3Writer.write( writer, v4Model );
+
+ transaction.createFile( writer.toString(), targetFile );
+
+ List warnings = translator.getWarnings();
+
+ for ( Iterator i = warnings.iterator(); i.hasNext(); )
+ {
+ String message = (String) i.next();
+ reporter.addWarning( artifact, message );
+ }
+ }
+ catch ( XmlPullParserException e )
+ {
+ reporter.addFailure( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+ result = false;
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryConversionException( "Unable to write converted POM", e );
+ }
+ catch ( PomTranslationException e )
+ {
+ reporter.addFailure( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+ result = false;
+ }
+ finally
+ {
+ IOUtil.close( writer );
+ }
+ }
+ }
+ else
+ {
+ reporter.addWarning( artifact, getI18NString( "warning.missing.pom" ) );
+ }
+ return result;
+ }
+
+ private boolean doRelocation( Artifact artifact, org.apache.maven.model.v3_0_0.Model v3Model,
+ ArtifactRepository repository, FileTransaction transaction )
+ throws IOException
+ {
+ Properties properties = v3Model.getProperties();
+ if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" ) ||
+ properties.containsKey( "relocated.version" ) )
+ {
+ String newGroupId = properties.getProperty( "relocated.groupId", v3Model.getGroupId() );
+ properties.remove( "relocated.groupId" );
+
+ String newArtifactId = properties.getProperty( "relocated.artifactId", v3Model.getArtifactId() );
+ properties.remove( "relocated.artifactId" );
+
+ String newVersion = properties.getProperty( "relocated.version", v3Model.getVersion() );
+ properties.remove( "relocated.version" );
+
+ String message = properties.getProperty( "relocated.message", "" );
+ properties.remove( "relocated.message" );
+
+ if ( properties.isEmpty() )
+ {
+ v3Model.setProperties( null );
+ }
+
+ writeRelocationPom( v3Model.getGroupId(), v3Model.getArtifactId(), v3Model.getVersion(), newGroupId,
+ newArtifactId, newVersion, message, repository, transaction );
+
+ v3Model.setGroupId( newGroupId );
+ v3Model.setArtifactId( newArtifactId );
+ v3Model.setVersion( newVersion );
+
+ artifact.setGroupId( newGroupId );
+ artifact.setArtifactId( newArtifactId );
+ artifact.setVersion( newVersion );
+
+ return true;
+ }
+ else
+ {
+ return false;
+ }
+ }
+
+ private void writeRelocationPom( String groupId, String artifactId, String version, String newGroupId,
+ String newArtifactId, String newVersion, String message,
+ ArtifactRepository repository, FileTransaction transaction )
+ throws IOException
+ {
+ Model pom = new Model();
+ pom.setGroupId( groupId );
+ pom.setArtifactId( artifactId );
+ pom.setVersion( version );
+
+ DistributionManagement dMngt = new DistributionManagement();
+
+ Relocation relocation = new Relocation();
+ relocation.setGroupId( newGroupId );
+ relocation.setArtifactId( newArtifactId );
+ relocation.setVersion( newVersion );
+ if ( message != null && message.length() > 0 )
+ {
+ relocation.setMessage( message );
+ }
+
+ dMngt.setRelocation( relocation );
+
+ pom.setDistributionManagement( dMngt );
+
+ Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, "pom" );
+ File pomFile = new File( repository.getBasedir(), repository.pathOf( artifact ) );
+
+ StringWriter strWriter = new StringWriter();
+ MavenXpp3Writer pomWriter = new MavenXpp3Writer();
+ pomWriter.write( strWriter, pom );
+
+ transaction.createFile( strWriter.toString(), pomFile );
+ }
+
+ private String getI18NString( String key, String arg0 )
+ {
+ return i18n.format( getClass().getName(), Locale.getDefault(), key, arg0 );
+ }
+
+ private String getI18NString( String key )
+ {
+ return i18n.getString( getClass().getName(), Locale.getDefault(), key );
+ }
+
+ private boolean testChecksums( Artifact artifact, File file, ArtifactReporter reporter )
+ throws IOException
+ {
+
+ boolean result =
+ verifyChecksum( file, file.getName() + ".md5", md5Digester, reporter, artifact, "failure.incorrect.md5" );
+ result = result && verifyChecksum( file, file.getName() + ".sha1", sha1Digester, reporter, artifact,
+ "failure.incorrect.sha1" );
+ return result;
+ }
+
+ private boolean verifyChecksum( File file, String fileName, Digester digester, ArtifactReporter reporter,
+ Artifact artifact, String key )
+ throws IOException
+ {
+ boolean result = true;
+
+ File checksumFile = new File( file.getParentFile(), fileName );
+ if ( checksumFile.exists() )
+ {
+ String checksum = FileUtils.fileRead( checksumFile );
+ try
+ {
+ digester.verify( file, checksum );
+ }
+ catch ( DigesterException e )
+ {
+ reporter.addFailure( artifact, getI18NString( key ) );
+ result = false;
+ }
+ }
+ return result;
+ }
+
+ private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
+ FileTransaction transaction )
+ throws RepositoryConversionException
+ {
+ File sourceFile = artifact.getFile();
+
+ File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+
+ boolean result = true;
+ try
+ {
+ boolean matching = false;
+ if ( !force && targetFile.exists() )
+ {
+ matching = FileUtils.contentEquals( sourceFile, targetFile );
+ if ( !matching )
+ {
+ reporter.addFailure( artifact, getI18NString( "failure.target.already.exists" ) );
+ result = false;
+ }
+ }
+ if ( result )
+ {
+ if ( force || !matching )
+ {
+ if ( testChecksums( artifact, sourceFile, reporter ) )
+ {
+ transaction.copyFile( sourceFile, targetFile );
+ }
+ else
+ {
+ result = false;
+ }
+ }
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryConversionException( "Error copying artifact", e );
+ }
+ return result;
+ }
+
+ public void convert( List artifacts, ArtifactRepository targetRepository, ArtifactReporter reporter )
+ throws RepositoryConversionException
+ {
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact artifact = (Artifact) i.next();
+ convert( artifact, targetRepository, reporter );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception occuring during repository conversion.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class RepositoryConversionException
+ extends Exception
+{
+ public RepositoryConversionException( String message )
+ {
+ super( message );
+ }
+
+ public RepositoryConversionException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.ArtifactReporter;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.List;
+
+/**
+ * Copy a set of artifacts from one repository to the other, converting if necessary.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryConverter
+{
+ String ROLE = RepositoryConverter.class.getName();
+
+ /**
+ * Convert a single artifact, writing it into the target repository.
+ *
+ * @param artifact the artifact to convert
+ * @param targetRepository the target repository
+ * @param reporter reporter to track the results of the conversion
+ */
+ void convert( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter )
+ throws RepositoryConversionException;
+
+ /**
+ * Convert a set of artifacts, writing them into the target repository.
+ *
+ * @param artifacts the set of artifacts to convert
+ * @param targetRepository the target repository
+ * @param reporter reporter to track the results of the conversions
+ */
+ void convert( List artifacts, ArtifactRepository targetRepository, ArtifactReporter reporter )
+ throws RepositoryConversionException;
+}
--- /dev/null
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Abstract class for the TransactionEvents
+ *
+ * @author Edwin Punzalan
+ */
+public abstract class AbstractTransactionEvent
+ implements TransactionEvent
+{
+ private File backup;
+
+ private List createdDirs;
+
+ /**
+ * Method that creates a directory as well as all the parent directories needed
+ *
+ * @param dir The File directory to be created
+ * @throws IOException when an unrecoverable error occurred
+ */
+ protected void mkDirs( File dir )
+ throws IOException
+ {
+ List createDirs = new ArrayList();
+
+ File parent = dir;
+ while ( !parent.exists() || !parent.isDirectory() )
+ {
+ createDirs.add( parent );
+
+ parent = parent.getParentFile();
+ }
+
+ createdDirs = new ArrayList();
+
+ while ( !createDirs.isEmpty() )
+ {
+ File directory = (File) createDirs.remove( createDirs.size() - 1 );
+
+ if ( directory.mkdir() )
+ {
+ createdDirs.add( directory );
+ }
+ else
+ {
+ throw new IOException( "Failed to create directory: " + directory.getAbsolutePath() );
+ }
+ }
+ }
+
+ protected void revertMkDirs()
+ throws IOException
+ {
+ if ( createdDirs != null )
+ {
+ Collections.reverse( createdDirs );
+
+ while ( !createdDirs.isEmpty() )
+ {
+ File dir = (File) createdDirs.remove( 0 );
+
+ if ( dir.isDirectory() && dir.list().length == 0 )
+ {
+ FileUtils.deleteDirectory( dir.getAbsolutePath() );
+ }
+ else
+ {
+ //cannot rollback created directory if it still contains files
+ break;
+ }
+ }
+ }
+ }
+
+ protected void createBackup( File file )
+ throws IOException
+ {
+ if ( file.exists() && file.isFile() )
+ {
+ backup = File.createTempFile( "temp-", ".backup" );
+
+ FileUtils.copyFile( file, backup );
+
+ backup.deleteOnExit();
+ }
+ }
+
+ protected void restoreBackup( File file )
+ throws IOException
+ {
+ if ( backup != null )
+ {
+ FileUtils.copyFile( backup, file );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Event to copy a file.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class CopyFileEvent
+ extends AbstractTransactionEvent
+{
+ private final File source;
+
+ private final File destination;
+
+ public CopyFileEvent( File source, File destination )
+ {
+ this.source = source;
+ this.destination = destination;
+ }
+
+ public void commit()
+ throws IOException
+ {
+ createBackup( destination );
+
+ mkDirs( destination.getParentFile() );
+
+ FileUtils.copyFile( source, destination );
+ }
+
+ public void rollback()
+ throws IOException
+ {
+ FileUtils.fileDelete( destination.getAbsolutePath() );
+
+ revertMkDirs();
+
+ restoreBackup( destination );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Event for creating a file from a string content.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class CreateFileEvent
+ extends AbstractTransactionEvent
+{
+ private final File destination;
+
+ private final String content;
+
+ public CreateFileEvent( String content, File destination )
+ {
+ this.content = content;
+ this.destination = destination;
+ }
+
+ public void commit()
+ throws IOException
+ {
+ createBackup( destination );
+
+ mkDirs( destination.getParentFile() );
+
+ if ( !destination.exists() && !destination.createNewFile() )
+ {
+ throw new IOException( "Unable to create new file" );
+ }
+
+ FileUtils.fileWrite( destination.getAbsolutePath(), content );
+ }
+
+ public void rollback()
+ throws IOException
+ {
+ FileUtils.fileDelete( destination.getAbsolutePath() );
+
+ revertMkDirs();
+
+ restoreBackup( destination );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Implement commit/rollback semantics for a set of files.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class FileTransaction
+{
+ private List events = new ArrayList();
+
+ public void commit()
+ throws RepositoryConversionException
+ {
+ List toRollback = new ArrayList( events.size() );
+
+ for ( Iterator i = events.iterator(); i.hasNext(); )
+ {
+ TransactionEvent event = (TransactionEvent) i.next();
+
+ try
+ {
+ event.commit();
+
+ toRollback.add( event );
+ }
+ catch ( IOException e )
+ {
+ try
+ {
+ rollback( toRollback );
+
+ throw new RepositoryConversionException( "Unable to commit file transaction", e );
+ }
+ catch ( IOException ioe )
+ {
+ throw new RepositoryConversionException(
+ "Unable to commit file transaction, and rollback failed with error: '" + ioe.getMessage() + "'",
+ e );
+ }
+ }
+ }
+ }
+
+ private void rollback( List toRollback )
+ throws IOException
+ {
+ for ( Iterator i = toRollback.iterator(); i.hasNext(); )
+ {
+ TransactionEvent event = (TransactionEvent) i.next();
+
+ event.rollback();
+ }
+ }
+
+ public void copyFile( File source, File destination )
+ {
+ events.add( new CopyFileEvent( source, destination ) );
+ }
+
+ public void createFile( String content, File destination )
+ {
+ events.add( new CreateFileEvent( content, destination ) );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+/**
+ * Interface for individual events in a transaction.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface TransactionEvent
+{
+ /**
+ * Commit this event.
+ *
+ * @throws IOException if an error occurred committing the change
+ */
+ void commit()
+ throws IOException;
+
+ /**
+ * Rollback the event already committed.
+ *
+ * @throws IOException if an error occurred reverting the change
+ */
+ void rollback()
+ throws IOException;
+}
+++ /dev/null
-package org.apache.maven.repository.converter;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
-import org.apache.maven.model.DistributionManagement;
-import org.apache.maven.model.Model;
-import org.apache.maven.model.Relocation;
-import org.apache.maven.model.converter.ArtifactPomRewriter;
-import org.apache.maven.model.converter.ModelConverter;
-import org.apache.maven.model.converter.PomTranslationException;
-import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
-import org.apache.maven.model.v3_0_0.io.xpp3.MavenXpp3Reader;
-import org.apache.maven.repository.converter.transaction.FileTransaction;
-import org.apache.maven.repository.digest.Digester;
-import org.apache.maven.repository.digest.DigesterException;
-import org.apache.maven.repository.reporting.ArtifactReporter;
-import org.codehaus.plexus.i18n.I18N;
-import org.codehaus.plexus.util.FileUtils;
-import org.codehaus.plexus.util.IOUtil;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.Properties;
-import java.util.regex.Matcher;
-
-/**
- * Implementation of repository conversion class.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.repository.converter.RepositoryConverter" role-hint="default"
- */
-public class DefaultRepositoryConverter
- implements RepositoryConverter
-{
- /**
- * @plexus.requirement role-hint="sha1"
- */
- private Digester sha1Digester;
-
- /**
- * @plexus.requirement role-hint="md5"
- */
- private Digester md5Digester;
-
- /**
- * @plexus.requirement
- */
- private ArtifactFactory artifactFactory;
-
- /**
- * @plexus.requirement
- */
- private ArtifactPomRewriter rewriter;
-
- /**
- * @plexus.requirement
- */
- private ModelConverter translator;
-
- /**
- * @plexus.configuration default-value="false"
- */
- private boolean force;
-
- /**
- * @plexus.configuration default-value="false"
- */
- private boolean dryrun;
-
- /**
- * @plexus.requirement
- */
- private I18N i18n;
-
- public void convert( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter )
- throws RepositoryConversionException
- {
- if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
- {
- throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) );
- }
-
- if ( validateMetadata( artifact, reporter ) )
- {
- FileTransaction transaction = new FileTransaction();
-
- if ( copyPom( artifact, targetRepository, reporter, transaction ) )
- {
- if ( copyArtifact( artifact, targetRepository, reporter, transaction ) )
- {
- Metadata metadata = createBaseMetadata( artifact );
- Versioning versioning = new Versioning();
- versioning.addVersion( artifact.getBaseVersion() );
- metadata.setVersioning( versioning );
- updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata,
- transaction );
-
- metadata = createBaseMetadata( artifact );
- metadata.setVersion( artifact.getBaseVersion() );
- versioning = new Versioning();
-
- Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( artifact.getVersion() );
- if ( matcher.matches() )
- {
- Snapshot snapshot = new Snapshot();
- snapshot.setBuildNumber( Integer.valueOf( matcher.group( 3 ) ).intValue() );
- snapshot.setTimestamp( matcher.group( 2 ) );
- versioning.setSnapshot( snapshot );
- }
-
- // TODO: merge latest/release/snapshot from source instead
- metadata.setVersioning( versioning );
- updateMetadata( new SnapshotArtifactRepositoryMetadata( artifact ), targetRepository, metadata,
- transaction );
-
- if ( !dryrun )
- {
- transaction.commit();
- }
- reporter.addSuccess( artifact );
- }
- }
- }
- }
-
- private static Metadata createBaseMetadata( Artifact artifact )
- {
- Metadata metadata = new Metadata();
- metadata.setArtifactId( artifact.getArtifactId() );
- metadata.setGroupId( artifact.getGroupId() );
- return metadata;
- }
-
- private void updateMetadata( RepositoryMetadata artifactMetadata, ArtifactRepository targetRepository,
- Metadata newMetadata, FileTransaction transaction )
- throws RepositoryConversionException
- {
- File file = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
-
- Metadata metadata;
- boolean changed;
-
- if ( file.exists() )
- {
- metadata = readMetadata( file );
- changed = metadata.merge( newMetadata );
- }
- else
- {
- changed = true;
- metadata = newMetadata;
- }
-
- if ( changed )
- {
- StringWriter writer = null;
- try
- {
- writer = new StringWriter();
-
- MetadataXpp3Writer mappingWriter = new MetadataXpp3Writer();
-
- mappingWriter.write( writer, metadata );
-
- transaction.createFile( writer.toString(), file );
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Error writing target metadata", e );
- }
- finally
- {
- IOUtil.close( writer );
- }
- }
- }
-
- private Metadata readMetadata( File file )
- throws RepositoryConversionException
- {
- Metadata metadata;
- MetadataXpp3Reader reader = new MetadataXpp3Reader();
- FileReader fileReader = null;
- try
- {
- fileReader = new FileReader( file );
- metadata = reader.read( fileReader );
- }
- catch ( FileNotFoundException e )
- {
- throw new RepositoryConversionException( "Error reading target metadata", e );
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Error reading target metadata", e );
- }
- catch ( XmlPullParserException e )
- {
- throw new RepositoryConversionException( "Error reading target metadata", e );
- }
- finally
- {
- IOUtil.close( fileReader );
- }
- return metadata;
- }
-
- private boolean validateMetadata( Artifact artifact, ArtifactReporter reporter )
- throws RepositoryConversionException
- {
- ArtifactRepository repository = artifact.getRepository();
-
- boolean result = true;
-
- RepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact );
- File file =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
- if ( file.exists() )
- {
- Metadata metadata = readMetadata( file );
- result = validateMetadata( metadata, repositoryMetadata, artifact, reporter );
- }
-
- repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- file = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
- if ( file.exists() )
- {
- Metadata metadata = readMetadata( file );
- result = result && validateMetadata( metadata, repositoryMetadata, artifact, reporter );
- }
-
- return result;
- }
-
- private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
- ArtifactReporter reporter )
- {
- String groupIdKey;
- String artifactIdKey = null;
- String snapshotKey = null;
- String versionKey = null;
- String versionsKey = null;
- if ( repositoryMetadata.storedInGroupDirectory() )
- {
- groupIdKey = "failure.incorrect.groupMetadata.groupId";
- }
- else if ( repositoryMetadata.storedInArtifactVersionDirectory() )
- {
- groupIdKey = "failure.incorrect.snapshotMetadata.groupId";
- artifactIdKey = "failure.incorrect.snapshotMetadata.artifactId";
- versionKey = "failure.incorrect.snapshotMetadata.version";
- snapshotKey = "failure.incorrect.snapshotMetadata.snapshot";
- }
- else
- {
- groupIdKey = "failure.incorrect.artifactMetadata.groupId";
- artifactIdKey = "failure.incorrect.artifactMetadata.artifactId";
- versionsKey = "failure.incorrect.artifactMetadata.versions";
- }
-
- boolean result = true;
-
- if ( !metadata.getGroupId().equals( artifact.getGroupId() ) )
- {
- reporter.addFailure( artifact, getI18NString( groupIdKey ) );
- result = false;
- }
- if ( !repositoryMetadata.storedInGroupDirectory() )
- {
- if ( !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
- {
- reporter.addFailure( artifact, getI18NString( artifactIdKey ) );
- result = false;
- }
- if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
- {
- // artifact metadata
-
- boolean foundVersion = false;
- if ( metadata.getVersioning() != null )
- {
- for ( Iterator i = metadata.getVersioning().getVersions().iterator();
- i.hasNext() && !foundVersion; )
- {
- String version = (String) i.next();
- if ( version.equals( artifact.getBaseVersion() ) )
- {
- foundVersion = true;
- }
- }
- }
-
- if ( !foundVersion )
- {
- reporter.addFailure( artifact, getI18NString( versionsKey ) );
- result = false;
- }
- }
- else
- {
- // snapshot metadata
- if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
- {
- reporter.addFailure( artifact, getI18NString( versionKey ) );
- result = false;
- }
-
- if ( artifact.isSnapshot() )
- {
- Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( artifact.getVersion() );
- if ( matcher.matches() )
- {
- boolean correct = false;
- if ( metadata.getVersioning() != null && metadata.getVersioning().getSnapshot() != null )
- {
- Snapshot snapshot = metadata.getVersioning().getSnapshot();
- int build = Integer.valueOf( matcher.group( 3 ) ).intValue();
- String ts = matcher.group( 2 );
- if ( build == snapshot.getBuildNumber() && ts.equals( snapshot.getTimestamp() ) )
- {
- correct = true;
- }
- }
-
- if ( !correct )
- {
- reporter.addFailure( artifact, getI18NString( snapshotKey ) );
- result = false;
- }
- }
- }
- }
- }
- return result;
- }
-
- private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
- FileTransaction transaction )
- throws RepositoryConversionException
- {
- Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(),
- artifact.getVersion() );
- pom.setBaseVersion( artifact.getBaseVersion() );
- ArtifactRepository repository = artifact.getRepository();
- File file = new File( repository.getBasedir(), repository.pathOf( pom ) );
-
- boolean result = true;
- if ( file.exists() )
- {
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pom ) );
-
- String contents = null;
- boolean checksumsValid = false;
- try
- {
- if ( testChecksums( artifact, file, reporter ) )
- {
- checksumsValid = true;
- contents = FileUtils.fileRead( file );
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Unable to read source POM: " + e.getMessage(), e );
- }
-
- if ( checksumsValid && contents.indexOf( "modelVersion" ) >= 0 )
- {
- // v4 POM
- try
- {
- boolean matching = false;
- if ( !force && targetFile.exists() )
- {
- String targetContents = FileUtils.fileRead( targetFile );
- matching = targetContents.equals( contents );
- }
- if ( force || !matching )
- {
- transaction.createFile( contents, targetFile );
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Unable to write target POM: " + e.getMessage(), e );
- }
- }
- else
- {
- // v3 POM
- StringReader stringReader = new StringReader( contents );
- StringWriter writer = null;
- try
- {
- MavenXpp3Reader v3Reader = new MavenXpp3Reader();
- org.apache.maven.model.v3_0_0.Model v3Model = v3Reader.read( stringReader );
-
- if ( doRelocation( artifact, v3Model, targetRepository, transaction ) )
- {
- Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(),
- artifact.getArtifactId(),
- artifact.getVersion() );
- targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( relocatedPom ) );
- }
-
- Model v4Model = translator.translate( v3Model );
-
- translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(),
- v3Model.getVersion(), v3Model.getPackage() );
-
- writer = new StringWriter();
- MavenXpp3Writer Xpp3Writer = new MavenXpp3Writer();
- Xpp3Writer.write( writer, v4Model );
-
- transaction.createFile( writer.toString(), targetFile );
-
- List warnings = translator.getWarnings();
-
- for ( Iterator i = warnings.iterator(); i.hasNext(); )
- {
- String message = (String) i.next();
- reporter.addWarning( artifact, message );
- }
- }
- catch ( XmlPullParserException e )
- {
- reporter.addFailure( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
- result = false;
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Unable to write converted POM", e );
- }
- catch ( PomTranslationException e )
- {
- reporter.addFailure( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
- result = false;
- }
- finally
- {
- IOUtil.close( writer );
- }
- }
- }
- else
- {
- reporter.addWarning( artifact, getI18NString( "warning.missing.pom" ) );
- }
- return result;
- }
-
- private boolean doRelocation( Artifact artifact, org.apache.maven.model.v3_0_0.Model v3Model,
- ArtifactRepository repository, FileTransaction transaction )
- throws IOException
- {
- Properties properties = v3Model.getProperties();
- if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" ) ||
- properties.containsKey( "relocated.version" ) )
- {
- String newGroupId = properties.getProperty( "relocated.groupId", v3Model.getGroupId() );
- properties.remove( "relocated.groupId" );
-
- String newArtifactId = properties.getProperty( "relocated.artifactId", v3Model.getArtifactId() );
- properties.remove( "relocated.artifactId" );
-
- String newVersion = properties.getProperty( "relocated.version", v3Model.getVersion() );
- properties.remove( "relocated.version" );
-
- String message = properties.getProperty( "relocated.message", "" );
- properties.remove( "relocated.message" );
-
- if ( properties.isEmpty() )
- {
- v3Model.setProperties( null );
- }
-
- writeRelocationPom( v3Model.getGroupId(), v3Model.getArtifactId(), v3Model.getVersion(), newGroupId,
- newArtifactId, newVersion, message, repository, transaction );
-
- v3Model.setGroupId( newGroupId );
- v3Model.setArtifactId( newArtifactId );
- v3Model.setVersion( newVersion );
-
- artifact.setGroupId( newGroupId );
- artifact.setArtifactId( newArtifactId );
- artifact.setVersion( newVersion );
-
- return true;
- }
- else
- {
- return false;
- }
- }
-
- private void writeRelocationPom( String groupId, String artifactId, String version, String newGroupId,
- String newArtifactId, String newVersion, String message,
- ArtifactRepository repository, FileTransaction transaction )
- throws IOException
- {
- Model pom = new Model();
- pom.setGroupId( groupId );
- pom.setArtifactId( artifactId );
- pom.setVersion( version );
-
- DistributionManagement dMngt = new DistributionManagement();
-
- Relocation relocation = new Relocation();
- relocation.setGroupId( newGroupId );
- relocation.setArtifactId( newArtifactId );
- relocation.setVersion( newVersion );
- if ( message != null && message.length() > 0 )
- {
- relocation.setMessage( message );
- }
-
- dMngt.setRelocation( relocation );
-
- pom.setDistributionManagement( dMngt );
-
- Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, "pom" );
- File pomFile = new File( repository.getBasedir(), repository.pathOf( artifact ) );
-
- StringWriter strWriter = new StringWriter();
- MavenXpp3Writer pomWriter = new MavenXpp3Writer();
- pomWriter.write( strWriter, pom );
-
- transaction.createFile( strWriter.toString(), pomFile );
- }
-
- private String getI18NString( String key, String arg0 )
- {
- return i18n.format( getClass().getName(), Locale.getDefault(), key, arg0 );
- }
-
- private String getI18NString( String key )
- {
- return i18n.getString( getClass().getName(), Locale.getDefault(), key );
- }
-
- private boolean testChecksums( Artifact artifact, File file, ArtifactReporter reporter )
- throws IOException
- {
-
- boolean result =
- verifyChecksum( file, file.getName() + ".md5", md5Digester, reporter, artifact, "failure.incorrect.md5" );
- result = result && verifyChecksum( file, file.getName() + ".sha1", sha1Digester, reporter, artifact,
- "failure.incorrect.sha1" );
- return result;
- }
-
- private boolean verifyChecksum( File file, String fileName, Digester digester, ArtifactReporter reporter,
- Artifact artifact, String key )
- throws IOException
- {
- boolean result = true;
-
- File checksumFile = new File( file.getParentFile(), fileName );
- if ( checksumFile.exists() )
- {
- String checksum = FileUtils.fileRead( checksumFile );
- try
- {
- digester.verify( file, checksum );
- }
- catch ( DigesterException e )
- {
- reporter.addFailure( artifact, getI18NString( key ) );
- result = false;
- }
- }
- return result;
- }
-
- private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
- FileTransaction transaction )
- throws RepositoryConversionException
- {
- File sourceFile = artifact.getFile();
-
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
-
- boolean result = true;
- try
- {
- boolean matching = false;
- if ( !force && targetFile.exists() )
- {
- matching = FileUtils.contentEquals( sourceFile, targetFile );
- if ( !matching )
- {
- reporter.addFailure( artifact, getI18NString( "failure.target.already.exists" ) );
- result = false;
- }
- }
- if ( result )
- {
- if ( force || !matching )
- {
- if ( testChecksums( artifact, sourceFile, reporter ) )
- {
- transaction.copyFile( sourceFile, targetFile );
- }
- else
- {
- result = false;
- }
- }
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Error copying artifact", e );
- }
- return result;
- }
-
- public void convert( List artifacts, ArtifactRepository targetRepository, ArtifactReporter reporter )
- throws RepositoryConversionException
- {
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- convert( artifact, targetRepository, reporter );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.converter;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Exception occuring during repository conversion.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class RepositoryConversionException
- extends Exception
-{
- public RepositoryConversionException( String message )
- {
- super( message );
- }
-
- public RepositoryConversionException( String message, Throwable cause )
- {
- super( message, cause );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.converter;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.repository.reporting.ArtifactReporter;
-
-import java.util.List;
-import java.io.File;
-
-/**
- * Copy a set of artifacts from one repository to the other, converting if necessary.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryConverter
-{
- String ROLE = RepositoryConverter.class.getName();
-
- /**
- * Convert a single artifact, writing it into the target repository.
- *
- * @param artifact the artifact to convert
- * @param targetRepository the target repository
- * @param reporter reporter to track the results of the conversion
- */
- void convert( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter )
- throws RepositoryConversionException;
-
- /**
- * Convert a set of artifacts, writing them into the target repository.
- *
- * @param artifacts the set of artifacts to convert
- * @param targetRepository the target repository
- * @param reporter reporter to track the results of the conversions
- */
- void convert( List artifacts, ArtifactRepository targetRepository, ArtifactReporter reporter )
- throws RepositoryConversionException;
-}
+++ /dev/null
-package org.apache.maven.repository.converter.transaction;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.ArrayList;
-
-/**
- * Abstract class for the TransactionEvents
- *
- * @author Edwin Punzalan
- */
-public abstract class AbstractTransactionEvent
- implements TransactionEvent
-{
- private File backup;
-
- private List createdDirs;
-
- /**
- * Method that creates a directory as well as all the parent directories needed
- *
- * @param dir The File directory to be created
- * @throws IOException when an unrecoverable error occurred
- */
- protected void mkDirs( File dir )
- throws IOException
- {
- List createDirs = new ArrayList();
-
- File parent = dir;
- while( !parent.exists() || !parent.isDirectory() )
- {
- createDirs.add( parent );
-
- parent = parent.getParentFile();
- }
-
- createdDirs = new ArrayList();
-
- while ( !createDirs.isEmpty() )
- {
- File directory = (File) createDirs.remove( createDirs.size() - 1 );
-
- if ( directory.mkdir() )
- {
- createdDirs.add( directory );
- }
- else
- {
- throw new IOException( "Failed to create directory: " + directory.getAbsolutePath() );
- }
- }
- }
-
- protected void revertMkDirs()
- throws IOException
- {
- if ( createdDirs != null )
- {
- Collections.reverse( createdDirs );
-
- while( !createdDirs.isEmpty() )
- {
- File dir = (File) createdDirs.remove( 0 );
-
- if ( dir.isDirectory() && dir.list().length == 0 )
- {
- FileUtils.deleteDirectory( dir.getAbsolutePath() );
- }
- else
- {
- //cannot rollback created directory if it still contains files
- break;
- }
- }
- }
- }
-
- protected void createBackup( File file )
- throws IOException
- {
- if ( file.exists() && file.isFile() )
- {
- backup = File.createTempFile( "temp-", ".backup" );
-
- FileUtils.copyFile( file, backup );
-
- backup.deleteOnExit();
- }
- }
-
- protected void restoreBackup( File file )
- throws IOException
- {
- if ( backup != null )
- {
- FileUtils.copyFile( backup, file );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.converter.transaction;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Event to copy a file.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class CopyFileEvent
- extends AbstractTransactionEvent
-{
- private final File source;
-
- private final File destination;
-
- public CopyFileEvent( File source, File destination )
- {
- this.source = source;
- this.destination = destination;
- }
-
- public void commit()
- throws IOException
- {
- createBackup( destination );
-
- mkDirs( destination.getParentFile() );
-
- FileUtils.copyFile( source, destination );
- }
-
- public void rollback()
- throws IOException
- {
- FileUtils.fileDelete( destination.getAbsolutePath() );
-
- revertMkDirs();
-
- restoreBackup( destination );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.converter.transaction;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Event for creating a file from a string content.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class CreateFileEvent
- extends AbstractTransactionEvent
-{
- private final File destination;
-
- private final String content;
-
- public CreateFileEvent( String content, File destination )
- {
- this.content = content;
- this.destination = destination;
- }
-
- public void commit()
- throws IOException
- {
- createBackup( destination );
-
- mkDirs( destination.getParentFile() );
-
- if ( !destination.exists() && !destination.createNewFile() )
- {
- throw new IOException( "Unable to create new file" );
- }
-
- FileUtils.fileWrite( destination.getAbsolutePath(), content );
- }
-
- public void rollback()
- throws IOException
- {
- FileUtils.fileDelete( destination.getAbsolutePath() );
-
- revertMkDirs();
-
- restoreBackup( destination );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.converter.transaction;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.converter.RepositoryConversionException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Implement commit/rollback semantics for a set of files.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class FileTransaction
-{
- private List events = new ArrayList();
-
- public void commit()
- throws RepositoryConversionException
- {
- List toRollback = new ArrayList( events.size() );
-
- for ( Iterator i = events.iterator(); i.hasNext(); )
- {
- TransactionEvent event = (TransactionEvent) i.next();
-
- try
- {
- event.commit();
-
- toRollback.add( event );
- }
- catch ( IOException e )
- {
- try
- {
- rollback( toRollback );
-
- throw new RepositoryConversionException( "Unable to commit file transaction", e );
- }
- catch ( IOException ioe )
- {
- throw new RepositoryConversionException(
- "Unable to commit file transaction, and rollback failed with error: '" + ioe.getMessage() + "'",
- e );
- }
- }
- }
- }
-
- private void rollback( List toRollback )
- throws IOException
- {
- for ( Iterator i = toRollback.iterator(); i.hasNext(); )
- {
- TransactionEvent event = (TransactionEvent) i.next();
-
- event.rollback();
- }
- }
-
- public void copyFile( File source, File destination )
- {
- events.add( new CopyFileEvent( source, destination ) );
- }
-
- public void createFile( String content, File destination )
- {
- events.add( new CreateFileEvent( content, destination ) );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.converter.transaction;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-/**
- * Interface for individual events in a transaction.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface TransactionEvent
-{
- /**
- * Commit this event.
- *
- * @throws IOException if an error occurred committing the change
- */
- void commit()
- throws IOException;
-
- /**
- * Rollback the even already committed.
- *
- * @throws IOException if an error occurred reverting the change
- */
- void rollback()
- throws IOException;
-}
--- /dev/null
+#
+# Copyright 2005-2006 The Apache Software Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+failure.incorrect.md5=The MD5 checksum value was incorrect.
+failure.incorrect.sha1=The SHA1 checksum value was incorrect.
+failure.target.already.exists=The artifact could not be converted because it already exists.
+failure.invalid.source.pom=The source POM was invalid: {0}.
+
+warning.missing.pom=The artifact had no POM in the source repository.
+
+exception.repositories.match=Source and target repositories are identical.
+
+failure.incorrect.groupMetadata.groupId=The group ID in the source group metadata is incorrect.
+
+failure.incorrect.artifactMetadata.artifactId=The artifact ID in the source artifact metadata is incorrect.
+failure.incorrect.artifactMetadata.groupId=The group ID in the source artifact metadata is incorrect.
+failure.incorrect.artifactMetadata.versions=The version list in the source artifact metadata is incorrect.
+
+failure.incorrect.snapshotMetadata.artifactId=The artifact ID in the source artifact version metadata is incorrect.
+failure.incorrect.snapshotMetadata.groupId=The group ID in the source artifact version metadata is incorrect.
+failure.incorrect.snapshotMetadata.version=The version in the source artifact version metadata is incorrect.
+failure.incorrect.snapshotMetadata.snapshot=The snapshot information in the source artifact version metadata is incorrect.
+++ /dev/null
-#
-# Copyright 2005-2006 The Apache Software Foundation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-failure.incorrect.md5=The MD5 checksum value was incorrect.
-failure.incorrect.sha1=The SHA1 checksum value was incorrect.
-failure.target.already.exists=The artifact could not be converted because it already exists.
-failure.invalid.source.pom=The source POM was invalid: {0}.
-
-warning.missing.pom=The artifact had no POM in the source repository.
-
-exception.repositories.match=Source and target repositories are identical.
-
-failure.incorrect.groupMetadata.groupId=The group ID in the source group metadata is incorrect.
-
-failure.incorrect.artifactMetadata.artifactId=The artifact ID in the source artifact metadata is incorrect.
-failure.incorrect.artifactMetadata.groupId=The group ID in the source artifact metadata is incorrect.
-failure.incorrect.artifactMetadata.versions=The version list in the source artifact metadata is incorrect.
-
-failure.incorrect.snapshotMetadata.artifactId=The artifact ID in the source artifact version metadata is incorrect.
-failure.incorrect.snapshotMetadata.groupId=The group ID in the source artifact version metadata is incorrect.
-failure.incorrect.snapshotMetadata.version=The version in the source artifact version metadata is incorrect.
-failure.incorrect.snapshotMetadata.snapshot=The snapshot information in the source artifact version metadata is incorrect.
--- /dev/null
+package org.apache.maven.archiva.converter;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.reporting.ArtifactReporter;
+import org.apache.maven.archiva.reporting.ArtifactResult;
+import org.apache.maven.archiva.reporting.DefaultArtifactReporter;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.metadata.ArtifactMetadata;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.i18n.I18N;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.regex.Matcher;
+
+/**
+ * Test the repository converter.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo what about deletions from the source repository?
+ * @todo use artifact-test instead
+ * @todo should reject if dependencies are missing - rely on reporting?
+ * @todo group metadata
+ */
+public class RepositoryConverterTest
+ extends PlexusTestCase
+{
+ private ArtifactRepository sourceRepository;
+
+ private ArtifactRepository targetRepository;
+
+ private RepositoryConverter repositoryConverter;
+
+ private ArtifactFactory artifactFactory;
+
+ private ArtifactReporter reporter;
+
+ private static final int SLEEP_MILLIS = 100;
+
+ private I18N i18n;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
+
+ File sourceBase = getTestFile( "src/test/source-repository" );
+ sourceRepository =
+ factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
+
+ layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File targetBase = getTestFile( "target/test-target-repository" );
+ copyDirectoryStructure( getTestFile( "src/test/target-repository" ), targetBase );
+
+ targetRepository =
+ factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null, null );
+
+ repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "default" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ i18n = (I18N) lookup( I18N.ROLE );
+
+ reporter = new DefaultArtifactReporter();
+ }
+
+ private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
+ throws IOException
+ {
+ if ( !sourceDirectory.exists() )
+ {
+ throw new IOException( "Source directory doesn't exist (" + sourceDirectory.getAbsolutePath() + ")." );
+ }
+
+ File[] files = sourceDirectory.listFiles();
+
+ String sourcePath = sourceDirectory.getAbsolutePath();
+
+ for ( int i = 0; i < files.length; i++ )
+ {
+ File file = files[i];
+
+ String dest = file.getAbsolutePath();
+
+ dest = dest.substring( sourcePath.length() + 1 );
+
+ File destination = new File( destinationDirectory, dest );
+
+ if ( file.isFile() )
+ {
+ destination = destination.getParentFile();
+
+ FileUtils.copyFileToDirectory( file, destination );
+ }
+ else if ( file.isDirectory() )
+ {
+ if ( !".svn".equals( file.getName() ) )
+ {
+ if ( !destination.exists() && !destination.mkdirs() )
+ {
+ throw new IOException(
+ "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
+ }
+ copyDirectoryStructure( file, destination );
+ }
+ }
+ else
+ {
+ throw new IOException( "Unknown file type: " + file.getAbsolutePath() );
+ }
+ }
+ }
+
+ public void testV4PomConvert()
+ throws IOException, RepositoryConversionException
+ {
+ // test that it is copied as is
+
+ Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0" );
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ artifactMetadataFile.delete();
+
+ ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ File versionMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ versionMetadataFile.delete();
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ artifactFile.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+ assertTrue( "Check POM created", pomFile.exists() );
+
+ compareFiles( sourcePomFile, pomFile );
+
+ assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
+
+ File expectedMetadataFile = getTestFile( "src/test/expected-files/v4-artifact-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, artifactMetadataFile );
+
+ assertTrue( "Check snapshot metadata created", versionMetadataFile.exists() );
+
+ expectedMetadataFile = getTestFile( "src/test/expected-files/v4-version-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, versionMetadataFile );
+ }
+
+ public void testV3PomConvert()
+ throws IOException, RepositoryConversionException
+ {
+ // test that the pom is converted
+
+ Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0" );
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ artifactMetadataFile.delete();
+
+ ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ File versionMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ versionMetadataFile.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3.pom" );
+ assertTrue( "Check POM created", pomFile.exists() );
+
+ compareFiles( expectedPomFile, pomFile );
+
+ assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
+
+ File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-artifact-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, artifactMetadataFile );
+
+ assertTrue( "Check snapshot metadata created", versionMetadataFile.exists() );
+
+ expectedMetadataFile = getTestFile( "src/test/expected-files/v3-version-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, versionMetadataFile );
+ }
+
+ public void testV3PomConvertWithRelocation()
+ throws RepositoryConversionException, IOException
+ {
+ Artifact artifact = createArtifact( "test", "relocated-v3artifact", "1.0.0" );
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ artifactMetadataFile.delete();
+
+ ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ File versionMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ versionMetadataFile.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ //checkSuccess(); --> commented until MNG-2100 is fixed
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check if relocated artifact created", artifactFile.exists() );
+ assertTrue( "Check if relocated artifact matches",
+ FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+ Artifact pomArtifact = createArtifact( "relocated-test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
+ File pomFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( pomArtifact ) );
+ File testFile = getTestFile( "target/test-target-repository/" + targetRepository.pathOf( pomArtifact ) );
+ compareFiles( pomFile, testFile );
+
+ Artifact orig = createArtifact( "test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
+ artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( orig ) );
+ assertTrue( "Check if relocation artifact pom is created", artifactFile.exists() );
+ testFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( orig ) );
+ compareFiles( artifactFile, testFile );
+ }
+
+ public void testV3PomWarningsOnConvert()
+ throws RepositoryConversionException, IOException
+ {
+ // test that the pom is converted but that warnings are reported
+
+ Artifact artifact = createArtifact( "test", "v3-warnings-artifact", "1.0.0" );
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ artifactMetadataFile.delete();
+
+ ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ File versionMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ versionMetadataFile.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ assertEquals( "check no errors", 0, reporter.getFailures() );
+ assertEquals( "check number of warnings", 2, reporter.getWarnings() );
+ assertEquals( "check success", 1, reporter.getSuccesses() );
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-warnings.pom" );
+ assertTrue( "Check POM created", pomFile.exists() );
+
+ compareFiles( expectedPomFile, pomFile );
+
+ // TODO: check 2 warnings (extend and versions) matched on i18n key
+ }
+
+ private void doTestV4SnapshotPomConvert( String version, String expectedMetadataFileName )
+ throws RepositoryConversionException, IOException
+ {
+ // test that it is copied as is
+
+ Artifact artifact = createArtifact( "test", "v4artifact", version );
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ artifactMetadataFile.delete();
+
+ ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ snapshotMetadataFile.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+ assertTrue( "Check POM created", pomFile.exists() );
+
+ compareFiles( sourcePomFile, pomFile );
+
+ assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
+
+ File expectedMetadataFile = getTestFile( "src/test/expected-files/v4-snapshot-artifact-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, artifactMetadataFile );
+
+ assertTrue( "Check snapshot metadata created", snapshotMetadataFile.exists() );
+
+ expectedMetadataFile = getTestFile( expectedMetadataFileName );
+
+ compareFiles( expectedMetadataFile, snapshotMetadataFile );
+ }
+
+ public void testV3SnapshotPomConvert()
+ throws IOException, RepositoryConversionException
+ {
+ // test that the pom is converted
+
+ Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-SNAPSHOT" );
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ artifactMetadataFile.delete();
+
+ ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ snapshotMetadataFile.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-snapshot.pom" );
+ assertTrue( "Check POM created", pomFile.exists() );
+
+ compareFiles( expectedPomFile, pomFile );
+
+ assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
+
+ File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-artifact-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, artifactMetadataFile );
+
+ assertTrue( "Check snapshot metadata created", snapshotMetadataFile.exists() );
+
+ expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, snapshotMetadataFile );
+ }
+
+ public void testV4SnapshotPomConvert()
+ throws IOException, RepositoryConversionException
+ {
+ doTestV4SnapshotPomConvert( "1.0.0-SNAPSHOT", "src/test/expected-files/v4-snapshot-metadata.xml" );
+
+ assertTrue( true );
+ }
+
+ public void testV4TimestampedSnapshotPomConvert()
+ throws IOException, RepositoryConversionException
+ {
+ doTestV4SnapshotPomConvert( "1.0.0-20060111.120115-1",
+ "src/test/expected-files/v4-timestamped-snapshot-metadata.xml" );
+
+ assertTrue( true );
+ }
+
+ public void testV3TimestampedSnapshotPomConvert()
+ throws IOException, RepositoryConversionException
+ {
+ // test that the pom is converted
+
+ Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-20060105.130101-3" );
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ artifactMetadataFile.delete();
+
+ ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ snapshotMetadataFile.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-timestamped-snapshot.pom" );
+ assertTrue( "Check POM created", pomFile.exists() );
+
+ compareFiles( expectedPomFile, pomFile );
+
+ assertTrue( "Check artifact snapshotMetadata created", artifactMetadataFile.exists() );
+
+ File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-artifact-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, artifactMetadataFile );
+
+ assertTrue( "Check snapshot snapshotMetadata created", snapshotMetadataFile.exists() );
+
+ expectedMetadataFile = getTestFile( "src/test/expected-files/v3-timestamped-snapshot-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, snapshotMetadataFile );
+ }
+
+ public void testNoPomConvert()
+ throws IOException, RepositoryConversionException
+ {
+ // test that a POM is not created when there was none at the source
+
+ Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" );
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ assertEquals( "check no errors", 0, reporter.getFailures() );
+ assertEquals( "check one warning", 1, reporter.getWarnings() );
+ assertEquals( "check success", 1, reporter.getSuccesses() );
+ assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() );
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+
+ assertFalse( "Check no POM created", pomFile.exists() );
+ assertFalse( "No source POM", sourcePomFile.exists() );
+ }
+
+ public void testIncorrectSourceChecksumMd5()
+ throws RepositoryConversionException
+ {
+ // test that it fails when the source md5 is wrong
+
+ Artifact artifact = createArtifact( "test", "incorrectMd5Artifact", "1.0.0" );
+ File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ file.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkFailure();
+ assertEquals( "check failure message", getI18nString( "failure.incorrect.md5" ), getFailure().getReason() );
+
+ assertFalse( "Check artifact not created", file.exists() );
+
+ ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
+ File metadataFile =
+ new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ assertFalse( "Check metadata not created", metadataFile.exists() );
+ }
+
+ public void testIncorrectSourceChecksumSha1()
+ throws RepositoryConversionException
+ {
+ // test that it fails when the source sha1 is wrong
+
+ Artifact artifact = createArtifact( "test", "incorrectSha1Artifact", "1.0.0" );
+ File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ file.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkFailure();
+ assertEquals( "check failure message", getI18nString( "failure.incorrect.sha1" ), getFailure().getReason() );
+
+ assertFalse( "Check artifact not created", file.exists() );
+
+ ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
+ File metadataFile =
+ new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ assertFalse( "Check metadata not created", metadataFile.exists() );
+ }
+
+ public void testUnmodifiedArtifact()
+ throws RepositoryConversionException, IOException, InterruptedException
+ {
+ // test the unmodified artifact is untouched
+
+ Artifact artifact = createArtifact( "test", "unmodified-artifact", "1.0.0" );
+ Artifact pomArtifact = createPomArtifact( artifact );
+
+ File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
+ File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
+
+ assertTrue( "Check target file exists", targetFile.exists() );
+ assertTrue( "Check target POM exists", targetPomFile.exists() );
+
+ sourceFile.setLastModified( System.currentTimeMillis() );
+ sourcePomFile.setLastModified( System.currentTimeMillis() );
+
+ long origTime = targetFile.lastModified();
+ long origPomTime = targetPomFile.lastModified();
+
+ // Need to guarantee last modified is not equal
+ Thread.sleep( SLEEP_MILLIS );
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ compareFiles( sourceFile, targetFile );
+ compareFiles( sourcePomFile, targetPomFile );
+
+ assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
+ assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
+ }
+
+ public void testModifedArtifactFails()
+ throws InterruptedException, RepositoryConversionException, IOException
+ {
+ // test that it fails when the source artifact has changed and is different to the existing artifact in the
+ // target repository
+
+ Artifact artifact = createArtifact( "test", "modified-artifact", "1.0.0" );
+ Artifact pomArtifact = createPomArtifact( artifact );
+
+ File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
+ File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
+
+ assertTrue( "Check target file exists", targetFile.exists() );
+ assertTrue( "Check target POM exists", targetPomFile.exists() );
+
+ sourceFile.setLastModified( System.currentTimeMillis() );
+ sourcePomFile.setLastModified( System.currentTimeMillis() );
+
+ long origTime = targetFile.lastModified();
+ long origPomTime = targetPomFile.lastModified();
+
+ // Need to guarantee last modified is not equal
+ Thread.sleep( SLEEP_MILLIS );
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkFailure();
+ assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
+ getFailure().getReason() );
+
+ assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
+ assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
+
+ ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
+ File metadataFile =
+ new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ assertFalse( "Check metadata not created", metadataFile.exists() );
+ }
+
+ public void testForcedUnmodifiedArtifact()
+ throws Exception, IOException
+ {
+ // test unmodified artifact is still converted when set to force
+
+ repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "force-repository-converter" );
+
+ Artifact artifact = createArtifact( "test", "unmodified-artifact", "1.0.0" );
+ Artifact pomArtifact = createPomArtifact( artifact );
+
+ File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
+ File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
+
+ SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyy-MM-dd", Locale.getDefault() );
+ long origTime = dateFormat.parse( "2006-03-03" ).getTime();
+ targetFile.setLastModified( origTime );
+ targetPomFile.setLastModified( origTime );
+
+ sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() );
+ sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() );
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ compareFiles( sourceFile, targetFile );
+ compareFiles( sourcePomFile, targetPomFile );
+
+ assertFalse( "Check modified", origTime == targetFile.lastModified() );
+ assertFalse( "Check modified", origTime == targetPomFile.lastModified() );
+
+ ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
+ File metadataFile =
+ new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ assertTrue( "Check metadata created", metadataFile.exists() );
+ }
+
+ public void testDryRunSuccess()
+ throws Exception
+ {
+ // test dry run does nothing on a run that will be successful, and returns success
+
+ repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "dryrun-repository-converter" );
+
+ Artifact artifact = createArtifact( "test", "dryrun-artifact", "1.0.0" );
+ Artifact pomArtifact = createPomArtifact( artifact );
+
+ File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
+ File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ assertTrue( "Check source file exists", sourceFile.exists() );
+ assertTrue( "Check source POM exists", sourcePomFile.exists() );
+
+ assertFalse( "Check target file doesn't exist", targetFile.exists() );
+ assertFalse( "Check target POM doesn't exist", targetPomFile.exists() );
+
+ ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
+ File metadataFile =
+ new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ assertFalse( "Check metadata not created", metadataFile.exists() );
+ }
+
+ public void testDryRunFailure()
+ throws Exception
+ {
+ // test dry run does nothing on a run that will fail, and returns failure
+
+ repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "dryrun-repository-converter" );
+
+ Artifact artifact = createArtifact( "test", "modified-artifact", "1.0.0" );
+ Artifact pomArtifact = createPomArtifact( artifact );
+
+ File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
+ File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
+
+ assertTrue( "Check target file exists", targetFile.exists() );
+ assertTrue( "Check target POM exists", targetPomFile.exists() );
+
+ sourceFile.setLastModified( System.currentTimeMillis() );
+ sourcePomFile.setLastModified( System.currentTimeMillis() );
+
+ long origTime = targetFile.lastModified();
+ long origPomTime = targetPomFile.lastModified();
+
+ // Need to guarantee last modified is not equal
+ Thread.sleep( SLEEP_MILLIS );
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkFailure();
+ assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
+ getFailure().getReason() );
+
+ assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
+ assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
+
+ ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
+ File metadataFile =
+ new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ assertFalse( "Check metadata not created", metadataFile.exists() );
+ }
+
+ public void testRollbackArtifactCreated()
+ throws RepositoryConversionException, IOException
+ {
+ // test rollback can remove a created artifact, including checksums
+
+ Artifact artifact = createArtifact( "test", "rollback-created-artifact", "1.0.0" );
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ FileUtils.deleteDirectory( artifactMetadataFile.getParentFile() );
+
+ ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ File versionMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkFailure();
+ String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$";
+ assertTrue( "Check failure message", getFailure().getReason().matches( pattern ) );
+
+ assertFalse( "check artifact rolled back", artifactFile.exists() );
+ assertFalse( "check metadata rolled back", artifactMetadataFile.exists() );
+ assertFalse( "check metadata rolled back", versionMetadataFile.exists() );
+ }
+
+ public void testMultipleArtifacts()
+ throws RepositoryConversionException, IOException
+ {
+ // test multiple artifacts are converted
+
+ List artifacts = new ArrayList();
+ artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) );
+ artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) );
+ artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) );
+ repositoryConverter.convert( artifacts, targetRepository, reporter );
+ assertEquals( "check no errors", 0, reporter.getFailures() );
+ assertEquals( "check no warnings", 0, reporter.getWarnings() );
+ assertEquals( "check successes", 3, reporter.getSuccesses() );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact artifact = (Artifact) i.next();
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File expectedPomFile =
+ getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId() + ".pom" );
+ assertTrue( "Check POM created", pomFile.exists() );
+
+ compareFiles( expectedPomFile, pomFile );
+ }
+ }
+
+ public void testInvalidSourceArtifactMetadata()
+ throws Exception
+ {
+ // test artifact is not converted when source metadata is invalid, and returns failure
+
+ createModernSourceRepository();
+
+ Artifact artifact = createArtifact( "test", "incorrectArtifactMetadata", "1.0.0" );
+ File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ file.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkFailure();
+ assertEquals( "check failure message", getI18nString( "failure.incorrect.artifactMetadata.versions" ),
+ getFailure().getReason() );
+
+ assertFalse( "Check artifact not created", file.exists() );
+
+ ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
+ File metadataFile =
+ new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ assertFalse( "Check metadata not created", metadataFile.exists() );
+ }
+
+ public void testInvalidSourceSnapshotMetadata()
+ throws Exception, MalformedURLException
+ {
+ // test artifact is not converted when source snapshot metadata is invalid and returns failure
+
+ createModernSourceRepository();
+
+ Artifact artifact = createArtifact( "test", "incorrectSnapshotMetadata", "1.0.0-20060102.030405-6" );
+ File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ file.delete();
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkFailure();
+ assertEquals( "check failure message", getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ),
+ getFailure().getReason() );
+
+ assertFalse( "Check artifact not created", file.exists() );
+
+ ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
+ File metadataFile =
+ new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ assertFalse( "Check metadata not created", metadataFile.exists() );
+ }
+
+ public void testMergeArtifactMetadata()
+ throws RepositoryConversionException, IOException
+ {
+ // test artifact level metadata is merged when it already exists on successful conversion
+
+ Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" );
+
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ checkSuccess();
+
+ File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ assertTrue( "Check artifact created", artifactFile.exists() );
+ assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
+ assertTrue( "Check POM created", pomFile.exists() );
+
+ compareFiles( sourcePomFile, pomFile );
+
+ ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(),
+ targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
+
+ File expectedMetadataFile = getTestFile( "src/test/expected-files/newversion-artifact-metadata.xml" );
+
+ compareFiles( expectedMetadataFile, artifactMetadataFile );
+ }
+
+ public void testSourceAndTargetRepositoriesMatch()
+ throws Exception
+ {
+ // test that it fails if the same
+
+ ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(),
+ targetRepository.getLayout(), null, null );
+
+ Artifact artifact = createArtifact( "test", "repository-artifact", "1.0" );
+
+ try
+ {
+ repositoryConverter.convert( artifact, targetRepository, reporter );
+ fail( "Should have failed trying to convert within the same repository" );
+ }
+ catch ( RepositoryConversionException e )
+ {
+ // expected
+ assertEquals( "check message", getI18nString( "exception.repositories.match" ), e.getMessage() );
+ assertNull( "Check no additional cause", e.getCause() );
+ }
+ }
+
+ private Artifact createArtifact( String groupId, String artifactId, String version )
+ {
+ Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( version );
+ String baseVersion;
+ if ( matcher.matches() )
+ {
+ baseVersion = matcher.group( 1 ) + "-SNAPSHOT";
+ }
+ else
+ {
+ baseVersion = version;
+ }
+ return createArtifact( groupId, artifactId, baseVersion, version, "jar" );
+ }
+
+ private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
+ String type )
+ {
+ Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
+ artifact.setBaseVersion( baseVersion );
+ artifact.setRepository( sourceRepository );
+ artifact.setFile( new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) ) );
+ return artifact;
+ }
+
+ private Artifact createPomArtifact( Artifact artifact )
+ {
+ return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(),
+ artifact.getVersion(), "pom" );
+ }
+
+ private static void compareFiles( File expectedPomFile, File pomFile )
+ throws IOException
+ {
+ String expectedContent = normalizeString( FileUtils.fileRead( expectedPomFile ) );
+ String targetContent = normalizeString( FileUtils.fileRead( pomFile ) );
+ assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent,
+ targetContent );
+ }
+
+ private static String normalizeString( String path )
+ {
+ return path.trim().replaceAll( "\r\n", "\n" ).replace( '\r', '\n' ).replaceAll( "<\\?xml .+\\?>", "" );
+ }
+
+ private void checkSuccess()
+ {
+ assertEquals( "check no errors", 0, reporter.getFailures() );
+ assertEquals( "check no warnings", 0, reporter.getWarnings() );
+ assertEquals( "check success", 1, reporter.getSuccesses() );
+ }
+
+ private void checkFailure()
+ {
+ assertEquals( "check num errors", 1, reporter.getFailures() );
+ assertEquals( "check no warnings", 0, reporter.getWarnings() );
+ assertEquals( "check no success", 0, reporter.getSuccesses() );
+ }
+
+ private String getI18nString( String key )
+ {
+ return i18n.getString( repositoryConverter.getClass().getName(), Locale.getDefault(), key );
+ }
+
+ private ArtifactResult getFailure()
+ {
+ return (ArtifactResult) reporter.getArtifactFailureIterator().next();
+ }
+
+ private ArtifactResult getWarning()
+ {
+ return (ArtifactResult) reporter.getArtifactWarningIterator().next();
+ }
+
+ private void createModernSourceRepository()
+ throws Exception
+ {
+ ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File sourceBase = getTestFile( "src/test/source-modern-repository" );
+ sourceRepository =
+ factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+
+/**
+ * @author Edwin Punzalan
+ */
+public class CopyFileEventTest
+ extends PlexusTestCase
+{
+ private File testDir = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/copy-file" );
+
+ private File testDest = new File( testDir, "test-file.txt" );
+
+ private File testSource = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/test-file.txt" );
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ testSource.getParentFile().mkdirs();
+
+ testSource.createNewFile();
+
+ FileUtils.fileWrite( testSource.getAbsolutePath(), "source contents" );
+ }
+
+ public void testCopyCommitRollback()
+ throws Exception
+ {
+ assertTrue( "Test if the source exists", testSource.exists() );
+
+ String source = FileUtils.fileRead( testSource.getAbsolutePath() );
+
+ CopyFileEvent event = new CopyFileEvent( testSource, testDest );
+
+ assertFalse( "Test that the destination is not yet created", testDest.exists() );
+
+ event.commit();
+
+ assertTrue( "Test that the destination is created", testDest.exists() );
+
+ String target = FileUtils.fileRead( testDest.getAbsolutePath() );
+
+ assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
+
+ event.rollback();
+
+ assertFalse( "Test that the destination file has been deleted", testDest.exists() );
+ }
+
+ public void testCopyCommitRollbackWithBackup()
+ throws Exception
+ {
+ assertTrue( "Test if the source exists", testSource.exists() );
+
+ String source = FileUtils.fileRead( testSource.getAbsolutePath() );
+
+ testDest.getParentFile().mkdirs();
+
+ testDest.createNewFile();
+
+ FileUtils.fileWrite( testDest.getAbsolutePath(), "overwritten contents" );
+
+ assertTrue( "Test that the destination exists", testDest.exists() );
+
+ CopyFileEvent event = new CopyFileEvent( testSource, testDest );
+
+ String target = FileUtils.fileRead( testDest.getAbsolutePath() );
+
+ assertTrue( "Test that the destination contents have not changed", target.equals( "overwritten contents" ) );
+
+ event.commit();
+
+ target = FileUtils.fileRead( testDest.getAbsolutePath() );
+
+ assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
+
+ event.rollback();
+
+ target = FileUtils.fileRead( testDest.getAbsolutePath() );
+
+ assertTrue( "Test the destination file contents have been restored", target.equals( "overwritten contents" ) );
+ }
+
+ public void testCreateRollbackCommit()
+ throws Exception
+ {
+ assertTrue( "Test if the source exists", testSource.exists() );
+
+ String source = FileUtils.fileRead( testSource.getAbsolutePath() );
+
+ CopyFileEvent event = new CopyFileEvent( testSource, testDest );
+
+ assertFalse( "Test that the destination is not yet created", testDest.exists() );
+
+ event.rollback();
+
+ assertFalse( "Test that the destination file is not yet created", testDest.exists() );
+
+ event.commit();
+
+ assertTrue( "Test that the destination is created", testDest.exists() );
+
+ String target = FileUtils.fileRead( testDest.getAbsolutePath() );
+
+ assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ super.tearDown();
+
+ FileUtils.deleteDirectory(
+ new File( PlexusTestCase.getBasedir(), "target/transaction-tests" ).getAbsolutePath() );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.converter.transaction;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+
+/**
+ * @author Edwin Punzalan
+ */
+public class CreateFileEventTest
+ extends PlexusTestCase
+{
+ private File testDir = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/create-file" );
+
+ public void testCreateCommitRollback()
+ throws Exception
+ {
+ File testFile = new File( testDir, "test-file.txt" );
+
+ CreateFileEvent event = new CreateFileEvent( "file contents", testFile );
+
+ assertFalse( "Test file is not yet created", testFile.exists() );
+
+ event.commit();
+
+ assertTrue( "Test file is not yet created", testFile.exists() );
+
+ event.rollback();
+
+ assertFalse( "Test file is has been deleted after rollback", testFile.exists() );
+ assertFalse( "Test file parent directories has been rolledback too", testDir.exists() );
+ assertTrue( "target directory still exists", new File( PlexusTestCase.getBasedir(), "target" ).exists() );
+ }
+
+ public void testCreateCommitRollbackWithBackup()
+ throws Exception
+ {
+ File testFile = new File( testDir, "test-file.txt" );
+
+ testFile.getParentFile().mkdirs();
+
+ testFile.createNewFile();
+
+ FileUtils.fileWrite( testFile.getAbsolutePath(), "original contents" );
+
+ CreateFileEvent event = new CreateFileEvent( "modified contents", testFile );
+
+ String contents = FileUtils.fileRead( testFile.getAbsolutePath() );
+
+ assertEquals( "Test contents have not changed", "original contents", contents );
+
+ event.commit();
+
+ contents = FileUtils.fileRead( testFile.getAbsolutePath() );
+
+ assertEquals( "Test contents have not changed", "modified contents", contents );
+
+ event.rollback();
+
+ contents = FileUtils.fileRead( testFile.getAbsolutePath() );
+
+ assertEquals( "Test contents have not changed", "original contents", contents );
+ }
+
+ public void testCreateRollbackCommit()
+ throws Exception
+ {
+ File testFile = new File( testDir, "test-file.txt" );
+
+ CreateFileEvent event = new CreateFileEvent( "file contents", testFile );
+
+ assertFalse( "Test file is not yet created", testFile.exists() );
+
+ event.rollback();
+
+ assertFalse( "Test file is not yet created", testFile.exists() );
+
+ event.commit();
+
+ assertTrue( "Test file is not yet created", testFile.exists() );
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ super.tearDown();
+
+ FileUtils.deleteDirectory(
+ new File( PlexusTestCase.getBasedir(), "target/transaction-tests" ).getAbsolutePath() );
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.converter;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.metadata.ArtifactMetadata;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.repository.reporting.ArtifactReporter;
-import org.apache.maven.repository.reporting.ArtifactResult;
-import org.apache.maven.repository.reporting.DefaultArtifactReporter;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.i18n.I18N;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.regex.Matcher;
-
-/**
- * Test the repository converter.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo what about deletions from the source repository?
- * @todo use artifact-test instead
- * @todo should reject if dependencies are missing - rely on reporting?
- * @todo group metadata
- */
-public class RepositoryConverterTest
- extends PlexusTestCase
-{
- private ArtifactRepository sourceRepository;
-
- private ArtifactRepository targetRepository;
-
- private RepositoryConverter repositoryConverter;
-
- private ArtifactFactory artifactFactory;
-
- private ArtifactReporter reporter;
-
- private static final int SLEEP_MILLIS = 100;
-
- private I18N i18n;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
-
- File sourceBase = getTestFile( "src/test/source-repository" );
- sourceRepository =
- factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
-
- layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File targetBase = getTestFile( "target/test-target-repository" );
- copyDirectoryStructure( getTestFile( "src/test/target-repository" ), targetBase );
-
- targetRepository =
- factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null, null );
-
- repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "default" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- i18n = (I18N) lookup( I18N.ROLE );
-
- reporter = new DefaultArtifactReporter();
- }
-
- private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
- throws IOException
- {
- if ( !sourceDirectory.exists() )
- {
- throw new IOException( "Source directory doesn't exists (" + sourceDirectory.getAbsolutePath() + ")." );
- }
-
- File[] files = sourceDirectory.listFiles();
-
- String sourcePath = sourceDirectory.getAbsolutePath();
-
- for ( int i = 0; i < files.length; i++ )
- {
- File file = files[i];
-
- String dest = file.getAbsolutePath();
-
- dest = dest.substring( sourcePath.length() + 1 );
-
- File destination = new File( destinationDirectory, dest );
-
- if ( file.isFile() )
- {
- destination = destination.getParentFile();
-
- FileUtils.copyFileToDirectory( file, destination );
- }
- else if ( file.isDirectory() )
- {
- if ( !".svn".equals( file.getName() ) )
- {
- if ( !destination.exists() && !destination.mkdirs() )
- {
- throw new IOException(
- "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
- }
- copyDirectoryStructure( file, destination );
- }
- }
- else
- {
- throw new IOException( "Unknown file type: " + file.getAbsolutePath() );
- }
- }
- }
-
- public void testV4PomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that it is copied as is
-
- Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
- versionMetadataFile.delete();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- artifactFile.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( sourcePomFile, pomFile );
-
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v4-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot metadata created", versionMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( "src/test/expected-files/v4-version-metadata.xml" );
-
- compareFiles( expectedMetadataFile, versionMetadataFile );
- }
-
- public void testV3PomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that the pom is coverted
-
- Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
- versionMetadataFile.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
-
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot metadata created", versionMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( "src/test/expected-files/v3-version-metadata.xml" );
-
- compareFiles( expectedMetadataFile, versionMetadataFile );
- }
-
- public void testV3PomConvertWithRelocation()
- throws RepositoryConversionException, IOException
- {
- Artifact artifact = createArtifact( "test", "relocated-v3artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
- versionMetadataFile.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- //checkSuccess(); --> commented until MNG-2100 is fixed
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check if relocated artifact created", artifactFile.exists() );
- assertTrue( "Check if relocated artifact matches",
- FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
- Artifact pomArtifact = createArtifact( "relocated-test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
- File pomFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( pomArtifact ) );
- File testFile = getTestFile( "target/test-target-repository/" + targetRepository.pathOf( pomArtifact ) );
- compareFiles( pomFile, testFile );
-
- Artifact orig = createArtifact( "test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
- artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( orig ) );
- assertTrue( "Check if relocation artifact pom is created", artifactFile.exists() );
- testFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( orig ) );
- compareFiles( artifactFile, testFile );
- }
-
- /**
- * Verifies a v3 POM is still converted when it contains constructs that only
- * warrant warnings: expects 0 failures, exactly 2 warnings and 1 success,
- * and that the converted POM matches the expected fixture.
- */
- public void testV3PomWarningsOnConvert()
- throws RepositoryConversionException, IOException
- {
- // test that the pom is converted but that warnings are reported
-
- Artifact artifact = createArtifact( "test", "v3-warnings-artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- // remove stale target metadata so assertions reflect this run only
- artifactMetadataFile.delete();
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
- versionMetadataFile.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- assertEquals( "check no errors", 0, reporter.getFailures() );
- assertEquals( "check number of warnings", 2, reporter.getWarnings() );
- assertEquals( "check success", 1, reporter.getSuccesses() );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-warnings.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
-
- // TODO: check 2 warnings (extend and versions) matched on i18n key
- }
-
- /**
- * Shared driver for the v4 snapshot conversion tests: converts a v4 snapshot
- * artifact of the given version and checks the artifact, POM (copied as-is)
- * and both metadata files against expected fixtures.
- *
- * @param version the snapshot version to convert (plain or timestamped)
- * @param expectedMetadataFileName path of the expected snapshot metadata fixture
- */
- private void doTestV4SnapshotPomConvert( String version, String expectedMetadataFileName )
- throws RepositoryConversionException, IOException
- {
- // test that it is copied as is
-
- Artifact artifact = createArtifact( "test", "v4artifact", version );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- // remove stale target metadata so assertions reflect this run only
- artifactMetadataFile.delete();
-
- ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
- snapshotMetadataFile.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- // a v4 POM needs no conversion, so target POM must equal the source POM
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( sourcePomFile, pomFile );
-
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v4-snapshot-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot metadata created", snapshotMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( expectedMetadataFileName );
-
- compareFiles( expectedMetadataFile, snapshotMetadataFile );
- }
-
- /**
- * Verifies conversion of a plain v3 SNAPSHOT artifact: the converted POM and
- * both generated metadata files must match their expected fixtures.
- */
- public void testV3SnapshotPomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that the pom is coverted
-
- Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-SNAPSHOT" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- // remove stale target metadata so assertions reflect this run only
- artifactMetadataFile.delete();
-
- ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
- snapshotMetadataFile.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-snapshot.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
-
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot metadata created", snapshotMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-metadata.xml" );
-
- compareFiles( expectedMetadataFile, snapshotMetadataFile );
- }
-
- /** Converts a plain (non-timestamped) v4 SNAPSHOT via the shared driver. */
- public void testV4SnapshotPomConvert()
- throws IOException, RepositoryConversionException
- {
- doTestV4SnapshotPomConvert( "1.0.0-SNAPSHOT", "src/test/expected-files/v4-snapshot-metadata.xml" );
-
- // NOTE(review): this trailing assertion is a no-op; the real checks are in the driver
- assertTrue( true );
- }
-
- /** Converts a timestamped v4 SNAPSHOT via the shared driver. */
- public void testV4TimestampedSnapshotPomConvert()
- throws IOException, RepositoryConversionException
- {
- doTestV4SnapshotPomConvert( "1.0.0-20060111.120115-1",
- "src/test/expected-files/v4-timestamped-snapshot-metadata.xml" );
-
- // NOTE(review): this trailing assertion is a no-op; the real checks are in the driver
- assertTrue( true );
- }
-
- /**
- * Verifies conversion of a timestamped v3 SNAPSHOT artifact: converted POM,
- * artifact metadata and timestamped snapshot metadata must match fixtures.
- */
- public void testV3TimestampedSnapshotPomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that the pom is coverted
-
- Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-20060105.130101-3" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- // remove stale target metadata so assertions reflect this run only
- artifactMetadataFile.delete();
-
- ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
- snapshotMetadataFile.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-timestamped-snapshot.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
-
- assertTrue( "Check artifact snapshotMetadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot snapshotMetadata created", snapshotMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( "src/test/expected-files/v3-timestamped-snapshot-metadata.xml" );
-
- compareFiles( expectedMetadataFile, snapshotMetadataFile );
- }
-
- /**
- * Verifies that when the source has no POM, the artifact is still copied,
- * no POM is fabricated, and a single "missing POM" warning is reported.
- */
- public void testNoPomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that a POM is not created when there was none at the source
-
- Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" );
- repositoryConverter.convert( artifact, targetRepository, reporter );
- assertEquals( "check no errors", 0, reporter.getFailures() );
- assertEquals( "check no warnings", 1, reporter.getWarnings() );
- assertEquals( "check success", 1, reporter.getSuccesses() );
- assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
-
- assertFalse( "Check no POM created", pomFile.exists() );
- assertFalse( "No source POM", sourcePomFile.exists() );
- }
-
- /**
- * Verifies conversion fails with the "incorrect md5" message when the source
- * artifact's MD5 checksum is wrong, and that nothing is written to the target.
- */
- public void testIncorrectSourceChecksumMd5()
- throws RepositoryConversionException
- {
- // test that it fails when the source md5 is wrong
-
- Artifact artifact = createArtifact( "test", "incorrectMd5Artifact", "1.0.0" );
- File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- // ensure the target does not pre-exist so we can assert it was never created
- file.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.md5" ), getFailure().getReason() );
-
- assertFalse( "Check artifact not created", file.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- /**
- * Verifies conversion fails with the "incorrect sha1" message when the source
- * artifact's SHA-1 checksum is wrong, and that nothing is written to the target.
- */
- public void testIncorrectSourceChecksumSha1()
- throws RepositoryConversionException
- {
- // test that it fails when the source sha1 is wrong
-
- Artifact artifact = createArtifact( "test", "incorrectSha1Artifact", "1.0.0" );
- File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- // ensure the target does not pre-exist so we can assert it was never created
- file.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.sha1" ), getFailure().getReason() );
-
- assertFalse( "Check artifact not created", file.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- /**
- * Verifies that converting an artifact whose target copy already matches the
- * source leaves the target files untouched (last-modified timestamps unchanged).
- */
- public void testUnmodifiedArtifact()
- throws RepositoryConversionException, IOException, InterruptedException
- {
- // test the unmodified artifact is untouched
-
- Artifact artifact = createArtifact( "test", "unmodified-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- assertTrue( "Check target file exists", targetFile.exists() );
- assertTrue( "Check target POM exists", targetPomFile.exists() );
-
- // make the source appear newer than the target
- sourceFile.setLastModified( System.currentTimeMillis() );
- sourcePomFile.setLastModified( System.currentTimeMillis() );
-
- long origTime = targetFile.lastModified();
- long origPomTime = targetPomFile.lastModified();
-
- // Need to guarantee last modified is not equal
- Thread.sleep( SLEEP_MILLIS );
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- compareFiles( sourceFile, targetFile );
- compareFiles( sourcePomFile, targetPomFile );
-
- assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
- assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
- }
-
- /**
- * Verifies conversion fails with "target already exists" when the source was
- * modified after the target copy diverged, and that the target is untouched.
- * NOTE(review): method name typo "Modifed" predates this change; left as-is.
- */
- public void testModifedArtifactFails()
- throws InterruptedException, RepositoryConversionException, IOException
- {
- // test that it fails when the source artifact has changed and is different to the existing artifact in the
- // target repository
-
- Artifact artifact = createArtifact( "test", "modified-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- assertTrue( "Check target file exists", targetFile.exists() );
- assertTrue( "Check target POM exists", targetPomFile.exists() );
-
- // make the source appear newer than the target
- sourceFile.setLastModified( System.currentTimeMillis() );
- sourcePomFile.setLastModified( System.currentTimeMillis() );
-
- long origTime = targetFile.lastModified();
- long origPomTime = targetPomFile.lastModified();
-
- // Need to guarantee last modified is not equal
- Thread.sleep( SLEEP_MILLIS );
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkFailure();
- assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
- getFailure().getReason() );
-
- assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
- assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- /**
- * Verifies the "force" converter variant re-copies an artifact even when the
- * target is newer than the source, updating the target timestamps and metadata.
- * NOTE(review): IOException in the throws clause is redundant next to Exception.
- */
- public void testForcedUnmodifiedArtifact()
- throws Exception, IOException
- {
- // test unmodified artifact is still converted when set to force
-
- repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "force-repository-converter" );
-
- Artifact artifact = createArtifact( "test", "unmodified-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- // deliberately make the target NEWER than both sources; force must copy anyway
- SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyy-MM-dd", Locale.getDefault() );
- long origTime = dateFormat.parse( "2006-03-03" ).getTime();
- targetFile.setLastModified( origTime );
- targetPomFile.setLastModified( origTime );
-
- sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() );
- sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() );
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- compareFiles( sourceFile, targetFile );
- compareFiles( sourcePomFile, targetPomFile );
-
- assertFalse( "Check modified", origTime == targetFile.lastModified() );
- assertFalse( "Check modified", origTime == targetPomFile.lastModified() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
- assertTrue( "Check metadata created", metadataFile.exists() );
- }
-
- /**
- * Verifies the dry-run converter variant reports success without writing any
- * files to the target repository.
- */
- public void testDryRunSuccess()
- throws Exception
- {
- // test dry run does nothing on a run that will be successful, and returns success
-
- repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "dryrun-repository-converter" );
-
- Artifact artifact = createArtifact( "test", "dryrun-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- assertTrue( "Check source file exists", sourceFile.exists() );
- assertTrue( "Check source POM exists", sourcePomFile.exists() );
-
- assertFalse( "Check target file doesn't exist", targetFile.exists() );
- assertFalse( "Check target POM doesn't exist", targetPomFile.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- /**
- * Verifies the dry-run converter still reports the "target already exists"
- * failure for a modified artifact, while leaving the target files untouched.
- */
- public void testDryRunFailure()
- throws Exception
- {
- // test dry run does nothing on a run that will fail, and returns failure
-
- repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "dryrun-repository-converter" );
-
- Artifact artifact = createArtifact( "test", "modified-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- assertTrue( "Check target file exists", targetFile.exists() );
- assertTrue( "Check target POM exists", targetPomFile.exists() );
-
- // make the source appear newer than the target
- sourceFile.setLastModified( System.currentTimeMillis() );
- sourcePomFile.setLastModified( System.currentTimeMillis() );
-
- long origTime = targetFile.lastModified();
- long origPomTime = targetPomFile.lastModified();
-
- // Need to guarantee last modified is not equal
- Thread.sleep( SLEEP_MILLIS );
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkFailure();
- assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
- getFailure().getReason() );
-
- assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
- assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- /**
- * Verifies that when conversion fails mid-way (invalid source POM), the
- * transaction rolls back: the partially created artifact and metadata files
- * are removed from the target repository.
- */
- public void testRollbackArtifactCreated()
- throws RepositoryConversionException, IOException
- {
- // test rollback can remove a created artifact, including checksums
-
- Artifact artifact = createArtifact( "test", "rollback-created-artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- FileUtils.deleteDirectory( artifactMetadataFile.getParentFile() );
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkFailure();
- // the i18n message contains a {0} placeholder; turn it into a regex wildcard
- String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$";
- assertTrue( "Check failure message", getFailure().getReason().matches( pattern ) );
-
- assertFalse( "check artifact rolled back", artifactFile.exists() );
- assertFalse( "check metadata rolled back", artifactMetadataFile.exists() );
- assertFalse( "check metadata rolled back", versionMetadataFile.exists() );
- }
-
- /**
- * Verifies the list-based convert() overload converts every artifact in the
- * batch, reporting one success per artifact and producing each converted POM.
- */
- public void testMultipleArtifacts()
- throws RepositoryConversionException, IOException
- {
- // test multiple artifacts are converted
-
- List artifacts = new ArrayList();
- artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) );
- artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) );
- artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) );
- repositoryConverter.convert( artifacts, targetRepository, reporter );
- assertEquals( "check no errors", 0, reporter.getFailures() );
- assertEquals( "check no warnings", 0, reporter.getWarnings() );
- assertEquals( "check successes", 3, reporter.getSuccesses() );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile =
- getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId() + ".pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
- }
- }
-
- /**
- * Verifies conversion fails (with the artifact-metadata-versions message) when
- * the modern source repository's artifact-level metadata is invalid, and that
- * nothing is written to the target.
- */
- public void testInvalidSourceArtifactMetadata()
- throws Exception
- {
- // test artifact is not converted when source metadata is invalid, and returns failure
-
- createModernSourceRepository();
-
- Artifact artifact = createArtifact( "test", "incorrectArtifactMetadata", "1.0.0" );
- File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- file.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.artifactMetadata.versions" ),
- getFailure().getReason() );
-
- assertFalse( "Check artifact not created", file.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- /**
- * Verifies conversion fails (with the snapshot-metadata message) when the
- * modern source repository's snapshot metadata is invalid.
- * NOTE(review): MalformedURLException in the throws clause is redundant next to Exception.
- */
- public void testInvalidSourceSnapshotMetadata()
- throws Exception, MalformedURLException
- {
- // test artifact is not converted when source snapshot metadata is invalid and returns failure
-
- createModernSourceRepository();
-
- Artifact artifact = createArtifact( "test", "incorrectSnapshotMetadata", "1.0.0-20060102.030405-6" );
- File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- file.delete();
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ),
- getFailure().getReason() );
-
- assertFalse( "Check artifact not created", file.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- /**
- * Verifies that converting a new version of an artifact merges the new version
- * into the already-existing artifact-level metadata in the target repository.
- */
- public void testMergeArtifactMetadata()
- throws RepositoryConversionException, IOException
- {
- // test artifact level metadata is merged when it already exists on successful conversion
-
- Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" );
-
- repositoryConverter.convert( artifact, targetRepository, reporter );
- checkSuccess();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( sourcePomFile, pomFile );
-
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/newversion-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
- }
-
- /**
- * Verifies convert() throws RepositoryConversionException (with no nested
- * cause) when source and target point at the same repository URL.
- */
- public void testSourceAndTargetRepositoriesMatch()
- throws Exception
- {
- // test that it fails if the same
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- // build a "source" repository that deliberately reuses the target's URL/layout
- sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(),
- targetRepository.getLayout(), null, null );
-
- Artifact artifact = createArtifact( "test", "repository-artifact", "1.0" );
-
- try
- {
- repositoryConverter.convert( artifact, targetRepository, reporter );
- fail( "Should have failed trying to convert within the same repository" );
- }
- catch ( RepositoryConversionException e )
- {
- // expected
- assertEquals( "check message", getI18nString( "exception.repositories.match" ), e.getMessage() );
- assertNull( "Check no additional cause", e.getCause() );
- }
- }
-
- /**
- * Creates a jar test artifact, deriving the base version from the concrete
- * version: timestamped snapshot versions (matching VERSION_FILE_PATTERN) map
- * to "&lt;base&gt;-SNAPSHOT", anything else uses the version verbatim.
- */
- private Artifact createArtifact( String groupId, String artifactId, String version )
- {
- Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( version );
- String baseVersion;
- if ( matcher.matches() )
- {
- baseVersion = matcher.group( 1 ) + "-SNAPSHOT";
- }
- else
- {
- baseVersion = version;
- }
- return createArtifact( groupId, artifactId, baseVersion, version, "jar" );
- }
-
- /**
- * Creates a test artifact of the given type, anchored in the source
- * repository with its file path resolved via the repository layout.
- */
- private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
- String type )
- {
- Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
- artifact.setBaseVersion( baseVersion );
- artifact.setRepository( sourceRepository );
- artifact.setFile( new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) ) );
- return artifact;
- }
-
- /** Returns the companion POM artifact for the given artifact's coordinates. */
- private Artifact createPomArtifact( Artifact artifact )
- {
- return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(),
- artifact.getVersion(), "pom" );
- }
-
- /**
- * Asserts the two files have the same content after normalization
- * (line-ending and XML-declaration differences are ignored).
- */
- private static void compareFiles( File expectedPomFile, File pomFile )
- throws IOException
- {
- String expectedContent = normalizeString( FileUtils.fileRead( expectedPomFile ) );
- String targetContent = normalizeString( FileUtils.fileRead( pomFile ) );
- assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent,
- targetContent );
- }
-
- /** Trims, converts CRLF/CR line endings to LF, and strips the XML declaration. */
- private static String normalizeString( String path )
- {
- return path.trim().replaceAll( "\r\n", "\n" ).replace( '\r', '\n' ).replaceAll( "<\\?xml .+\\?>", "" );
- }
-
- /** Asserts the reporter recorded exactly one success and nothing else. */
- private void checkSuccess()
- {
- assertEquals( "check no errors", 0, reporter.getFailures() );
- assertEquals( "check no warnings", 0, reporter.getWarnings() );
- assertEquals( "check success", 1, reporter.getSuccesses() );
- }
-
- /** Asserts the reporter recorded exactly one failure and nothing else. */
- private void checkFailure()
- {
- assertEquals( "check num errors", 1, reporter.getFailures() );
- assertEquals( "check no warnings", 0, reporter.getWarnings() );
- assertEquals( "check no success", 0, reporter.getSuccesses() );
- }
-
- /** Resolves an i18n message keyed by the converter implementation's class name. */
- private String getI18nString( String key )
- {
- return i18n.getString( repositoryConverter.getClass().getName(), Locale.getDefault(), key );
- }
-
- /** Returns the first (and, in these tests, only) failure recorded by the reporter. */
- private ArtifactResult getFailure()
- {
- return (ArtifactResult) reporter.getArtifactFailureIterator().next();
- }
-
- /** Returns the first (and, in these tests, only) warning recorded by the reporter. */
- private ArtifactResult getWarning()
- {
- return (ArtifactResult) reporter.getArtifactWarningIterator().next();
- }
-
- /**
- * Repoints the source repository at the "modern" (default-layout) fixture
- * repository under src/test/source-modern-repository.
- */
- private void createModernSourceRepository()
- throws Exception
- {
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File sourceBase = getTestFile( "src/test/source-modern-repository" );
- sourceRepository =
- factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.converter.transaction;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-
-/**
- * @author Edwin Punzalan
- */
- public class CopyFileEventTest
- extends PlexusTestCase
- {
- // working directory holding the copy destination for each test
- private File testDir = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/copy-file" );
-
- // destination file that CopyFileEvent.commit() should create/overwrite
- private File testDest = new File( testDir, "test-file.txt" );
-
- // source file, recreated with known contents before every test in setUp()
- private File testSource = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/test-file.txt" );
-
- /** Creates the source file with the fixed contents "source contents". */
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- testSource.getParentFile().mkdirs();
-
- testSource.createNewFile();
-
- FileUtils.fileWrite( testSource.getAbsolutePath(), "source contents" );
- }
-
- /**
- * Commit copies the source to a previously absent destination; rollback
- * deletes the copied destination again.
- */
- public void testCopyCommitRollback()
- throws Exception
- {
- assertTrue( "Test if the source exists", testSource.exists() );
-
- String source = FileUtils.fileRead( testSource.getAbsolutePath() );
-
- CopyFileEvent event = new CopyFileEvent( testSource, testDest );
-
- assertFalse( "Test that the destination is not yet created", testDest.exists() );
-
- event.commit();
-
- assertTrue( "Test that the destination is created", testDest.exists() );
-
- String target = FileUtils.fileRead( testDest.getAbsolutePath() );
-
- assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
-
- event.rollback();
-
- assertFalse( "Test that the destination file has been deleted", testDest.exists() );
- }
-
- /**
- * When the destination already exists, commit overwrites it and rollback
- * restores the original destination contents from the backup.
- */
- public void testCopyCommitRollbackWithBackup()
- throws Exception
- {
- assertTrue( "Test if the source exists", testSource.exists() );
-
- String source = FileUtils.fileRead( testSource.getAbsolutePath() );
-
- testDest.getParentFile().mkdirs();
-
- testDest.createNewFile();
-
- FileUtils.fileWrite( testDest.getAbsolutePath(), "overwritten contents" );
-
- assertTrue( "Test that the destination exists", testDest.exists() );
-
- CopyFileEvent event = new CopyFileEvent( testSource, testDest );
-
- String target = FileUtils.fileRead( testDest.getAbsolutePath() );
-
- // constructing the event alone must not touch the destination
- assertTrue( "Test that the destination contents have not changed", target.equals( "overwritten contents" ) );
-
- event.commit();
-
- target = FileUtils.fileRead( testDest.getAbsolutePath() );
-
- assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
-
- event.rollback();
-
- target = FileUtils.fileRead( testDest.getAbsolutePath() );
-
- assertTrue( "Test the destination file contents have been restored", target.equals( "overwritten contents" ) );
- }
-
- /**
- * Rolling back before any commit is a no-op, and a subsequent commit still
- * performs the copy.
- */
- public void testCreateRollbackCommit()
- throws Exception
- {
- assertTrue( "Test if the source exists", testSource.exists() );
-
- String source = FileUtils.fileRead( testSource.getAbsolutePath() );
-
- CopyFileEvent event = new CopyFileEvent( testSource, testDest );
-
- assertFalse( "Test that the destination is not yet created", testDest.exists() );
-
- event.rollback();
-
- assertFalse( "Test that the destination file is not yet created", testDest.exists() );
-
- event.commit();
-
- assertTrue( "Test that the destination is created", testDest.exists() );
-
- String target = FileUtils.fileRead( testDest.getAbsolutePath() );
-
- assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
- }
-
- /** Removes the whole transaction-tests scratch tree after each test. */
- protected void tearDown()
- throws Exception
- {
- super.tearDown();
-
- FileUtils.deleteDirectory( new File( PlexusTestCase.getBasedir(),
- "target/transaction-tests" ).getAbsolutePath() );
- }
- }
+++ /dev/null
-package org.apache.maven.repository.converter.transaction;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-
-/**
- * @author Edwin Punzalan
- */
-public class CreateFileEventTest
- extends PlexusTestCase
-{
- private File testDir = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/create-file" );
-
- public void testCreateCommitRollback()
- throws Exception
- {
- File testFile = new File( testDir, "test-file.txt" );
-
- CreateFileEvent event = new CreateFileEvent( "file contents", testFile );
-
- assertFalse( "Test file is not yet created", testFile.exists() );
-
- event.commit();
-
- assertTrue( "Test file is not yet created", testFile.exists() );
-
- event.rollback();
-
- assertFalse( "Test file is has been deleted after rollback", testFile.exists() );
- assertFalse( "Test file parent directories has been rolledback too", testDir.exists() );
- assertTrue( "target directory still exists", new File( PlexusTestCase.getBasedir(), "target" ).exists() );
- }
-
- public void testCreateCommitRollbackWithBackup()
- throws Exception
- {
- File testFile = new File( testDir, "test-file.txt" );
-
- testFile.getParentFile().mkdirs();
-
- testFile.createNewFile();
-
- FileUtils.fileWrite( testFile.getAbsolutePath(), "original contents" );
-
- CreateFileEvent event = new CreateFileEvent( "modified contents", testFile );
-
- String contents = FileUtils.fileRead( testFile.getAbsolutePath() );
-
- assertEquals( "Test contents have not changed", "original contents", contents );
-
- event.commit();
-
- contents = FileUtils.fileRead( testFile.getAbsolutePath() );
-
- assertEquals( "Test contents have not changed", "modified contents", contents );
-
- event.rollback();
-
- contents = FileUtils.fileRead( testFile.getAbsolutePath() );
-
- assertEquals( "Test contents have not changed", "original contents", contents );
- }
-
- public void testCreateRollbackCommit()
- throws Exception
- {
- File testFile = new File( testDir, "test-file.txt" );
-
- CreateFileEvent event = new CreateFileEvent( "file contents", testFile );
-
- assertFalse( "Test file is not yet created", testFile.exists() );
-
- event.rollback();
-
- assertFalse( "Test file is not yet created", testFile.exists() );
-
- event.commit();
-
- assertTrue( "Test file is not yet created", testFile.exists() );
- }
-
- protected void tearDown()
- throws Exception
- {
- super.tearDown();
-
- FileUtils.deleteDirectory( new File( PlexusTestCase.getBasedir(),
- "target/transaction-tests" ).getAbsolutePath() );
- }
-}
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.converter.RepositoryConverter</role>
+ <implementation>org.apache.maven.archiva.converter.DefaultRepositoryConverter</implementation>
+ <role-hint>force-repository-converter</role-hint>
+ <configuration>
+ <force>true</force>
+ </configuration>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.digest.Digester</role>
+ <role-hint>sha1</role-hint>
+ <field-name>sha1Digester</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.digest.Digester</role>
+ <role-hint>md5</role-hint>
+ <field-name>md5Digester</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ <field-name>artifactFactory</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
+ <field-name>rewriter</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.i18n.I18N</role>
+ <field-name>i18n</field-name>
+ </requirement>
+ </requirements>
+ </component>
+ <component>
+ <role>org.apache.maven.archiva.converter.RepositoryConverter</role>
+ <implementation>org.apache.maven.archiva.converter.DefaultRepositoryConverter</implementation>
+ <role-hint>dryrun-repository-converter</role-hint>
+ <configuration>
+ <dryrun>true</dryrun>
+ </configuration>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.digest.Digester</role>
+ <role-hint>sha1</role-hint>
+ <field-name>sha1Digester</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.digest.Digester</role>
+ <role-hint>md5</role-hint>
+ <field-name>md5Digester</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
+ <field-name>artifactFactory</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
+ <field-name>rewriter</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.i18n.I18N</role>
+ <field-name>i18n</field-name>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<component-set>
- <components>
- <component>
- <role>org.apache.maven.repository.converter.RepositoryConverter</role>
- <implementation>org.apache.maven.repository.converter.DefaultRepositoryConverter</implementation>
- <role-hint>force-repository-converter</role-hint>
- <configuration>
- <force>true</force>
- </configuration>
- <requirements>
- <requirement>
- <role>org.apache.maven.repository.digest.Digester</role>
- <role-hint>sha1</role-hint>
- <field-name>sha1Digester</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.repository.digest.Digester</role>
- <role-hint>md5</role-hint>
- <field-name>md5Digester</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
- <field-name>artifactFactory</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
- <field-name>rewriter</field-name>
- </requirement>
- <requirement>
- <role>org.codehaus.plexus.i18n.I18N</role>
- <field-name>i18n</field-name>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.repository.converter.RepositoryConverter</role>
- <implementation>org.apache.maven.repository.converter.DefaultRepositoryConverter</implementation>
- <role-hint>dryrun-repository-converter</role-hint>
- <configuration>
- <dryrun>true</dryrun>
- </configuration>
- <requirements>
- <requirement>
- <role>org.apache.maven.repository.digest.Digester</role>
- <role-hint>sha1</role-hint>
- <field-name>sha1Digester</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.repository.digest.Digester</role>
- <role-hint>md5</role-hint>
- <field-name>md5Digester</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.artifact.factory.ArtifactFactory</role>
- <field-name>artifactFactory</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
- <field-name>rewriter</field-name>
- </requirement>
- <requirement>
- <role>org.codehaus.plexus.i18n.I18N</role>
- <field-name>i18n</field-name>
- </requirement>
- </requirements>
- </component>
- </components>
-</component-set>
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.converter.RepositoryConverter;
+import org.apache.maven.archiva.discovery.ArtifactDiscoverer;
+import org.apache.maven.archiva.discovery.DiscovererException;
+import org.apache.maven.archiva.reporting.ArtifactReporter;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.List;
+
+/**
+ * @author Jason van Zyl
+ * @plexus.component
+ */
+public class DefaultRepositoryManager
+ implements RepositoryManager
+{
+ /**
+ * @plexus.requirement role-hint="legacy"
+ */
+ private ArtifactDiscoverer artifactDiscoverer;
+
+ /**
+ * @plexus.requirement role-hint="legacy"
+ */
+ private ArtifactRepositoryLayout legacyLayout;
+
+ /**
+ * @plexus.requirement role-hint="default"
+ */
+ private ArtifactRepositoryLayout defaultLayout;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactRepositoryFactory artifactRepositoryFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryConverter repositoryConverter;
+
+ /**
+ * @plexus.requirement role-hint="default"
+ */
+ private ArtifactReporter reporter;
+
+ public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
+ boolean includeSnapshots )
+ throws RepositoryConversionException, DiscovererException
+ {
+ ArtifactRepository legacyRepository;
+
+ ArtifactRepository repository;
+
+ try
+ {
+ legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy",
+ legacyRepositoryDirectory.toURI().toURL().toString(),
+ legacyLayout, null, null );
+
+ repository = artifactRepositoryFactory.createArtifactRepository( "default",
+ repositoryDirectory.toURI().toURL().toString(),
+ defaultLayout, null, null );
+ }
+ catch ( MalformedURLException e )
+ {
+ throw new RepositoryConversionException( "Error converting legacy repository.", e );
+ }
+
+ List legacyArtifacts =
+ artifactDiscoverer.discoverArtifacts( legacyRepository, "converter", null, includeSnapshots );
+
+ repositoryConverter.convert( legacyArtifacts, repository, reporter );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.converter.RepositoryConversionException;
+import org.apache.maven.archiva.discovery.DiscovererException;
+
+import java.io.File;
+
+/**
+ * @author Jason van Zyl
+ */
+public interface RepositoryManager
+{
+ /**
+ * Role of the Repository Manager
+ */
+ String ROLE = RepositoryManager.class.getName();
+
+ /**
+ * Convert a legacy repository to a modern repository. This means a Maven 1.x repository
+ * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs.
+ *
+ * @param legacyRepositoryDirectory the directory of the legacy (Maven 1.x) repository to read from
+ * @param repositoryDirectory the directory of the modern (Maven 2.x) repository to write to
+ * @param includeSnapshots whether snapshot artifacts should be converted as well as releases
+ * @throws RepositoryConversionException if the repository could not be converted
+ * @throws DiscovererException if the legacy repository could not be scanned for artifacts
+ */
+ void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, boolean includeSnapshots )
+ throws RepositoryConversionException, DiscovererException;
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.proxy.ProxiedArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.List;
+
+/**
+ * Create an artifact repository from the given configuration.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface ConfiguredRepositoryFactory
+{
+ String ROLE = ConfiguredRepositoryFactory.class.getName();
+
+ /**
+ * Create an artifact repository from the given configuration.
+ *
+ * @param configuration the configuration
+ * @return the artifact repository
+ */
+ ArtifactRepository createRepository( RepositoryConfiguration configuration );
+
+ /**
+ * Create artifact repositories from the given configuration.
+ *
+ * @param configuration the configuration containing the repositories
+ * @return the artifact repositories
+ */
+ List createRepositories( Configuration configuration );
+
+ /**
+ * Create a local repository from the given configuration.
+ *
+ * @param configuration the configuration
+ * @return the local artifact repository
+ */
+ ArtifactRepository createLocalRepository( Configuration configuration );
+
+ /**
+ * Create an artifact repository from the given proxy repository configuration.
+ *
+ * @param configuration the configuration
+ * @return the artifact repository
+ */
+ ProxiedArtifactRepository createProxiedRepository( ProxiedRepositoryConfiguration configuration );
+
+ /**
+ * Create artifact repositories from the given proxy repository configurations.
+ *
+ * @param configuration the configuration containing the repositories
+ * @return the artifact repositories
+ */
+ List createProxiedRepositories( Configuration configuration );
+}
--- /dev/null
+package org.apache.maven.archiva.configuration;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.proxy.ProxiedArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Create artifact repositories from a configuration.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component role="org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory"
+ */
+public class DefaultConfiguredRepositoryFactory
+ implements ConfiguredRepositoryFactory
+{
+ /**
+ * @plexus.requirement role="org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout"
+ */
+ private Map repositoryLayouts;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactRepositoryFactory repoFactory;
+
+ public ArtifactRepository createRepository( RepositoryConfiguration configuration )
+ {
+ File repositoryDirectory = new File( configuration.getDirectory() );
+ String repoDir = repositoryDirectory.toURI().toString();
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) repositoryLayouts.get( configuration.getLayout() );
+ return repoFactory.createArtifactRepository( configuration.getId(), repoDir, layout, null, null );
+ }
+
+ public ProxiedArtifactRepository createProxiedRepository( ProxiedRepositoryConfiguration configuration )
+ {
+ boolean enabled = isEnabled( configuration.getSnapshotsPolicy() );
+ String updatePolicy =
+ getUpdatePolicy( configuration.getSnapshotsPolicy(), configuration.getSnapshotsInterval() );
+ ArtifactRepositoryPolicy snapshotsPolicy =
+ new ArtifactRepositoryPolicy( enabled, updatePolicy, ArtifactRepositoryPolicy.CHECKSUM_POLICY_FAIL );
+
+ enabled = isEnabled( configuration.getReleasesPolicy() );
+ updatePolicy = getUpdatePolicy( configuration.getReleasesPolicy(), configuration.getReleasesInterval() );
+ ArtifactRepositoryPolicy releasesPolicy =
+ new ArtifactRepositoryPolicy( enabled, updatePolicy, ArtifactRepositoryPolicy.CHECKSUM_POLICY_FAIL );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) repositoryLayouts.get( configuration.getLayout() );
+ ArtifactRepository artifactRepository = repoFactory.createArtifactRepository( configuration.getId(),
+ configuration.getUrl(), layout,
+ snapshotsPolicy, releasesPolicy );
+ ProxiedArtifactRepository repository = new ProxiedArtifactRepository( artifactRepository );
+ repository.setCacheFailures( configuration.isCacheFailures() );
+ repository.setHardFail( configuration.isHardFail() );
+ repository.setName( configuration.getName() );
+ repository.setUseNetworkProxy( configuration.isUseNetworkProxy() );
+ return repository;
+ }
+
+ public List createRepositories( Configuration configuration )
+ {
+ List managedRepositories = configuration.getRepositories();
+ List repositories = new ArrayList( managedRepositories.size() );
+
+ for ( Iterator i = managedRepositories.iterator(); i.hasNext(); )
+ {
+ repositories.add( createRepository( (RepositoryConfiguration) i.next() ) );
+ }
+
+ return repositories;
+ }
+
+ public List createProxiedRepositories( Configuration configuration )
+ {
+ List proxiedRepositories = configuration.getProxiedRepositories();
+ List repositories = new ArrayList( proxiedRepositories.size() );
+
+ for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
+ {
+ repositories.add( createProxiedRepository( (ProxiedRepositoryConfiguration) i.next() ) );
+ }
+
+ return repositories;
+ }
+
+ public ArtifactRepository createLocalRepository( Configuration configuration )
+ {
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) repositoryLayouts.get( "default" );
+ File localRepository = new File( configuration.getLocalRepository() );
+ localRepository.mkdirs();
+ return repoFactory.createArtifactRepository( "local", localRepository.toURI().toString(), layout, null, null );
+ }
+
+ private static String getUpdatePolicy( String policy, int interval )
+ {
+ return "interval".equals( policy ) ? policy + ":" + interval : policy;
+ }
+
+ private static boolean isEnabled( String policy )
+ {
+ return !"disabled".equals( policy );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.configuration.ProxiedRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.Proxy;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Default implementation of the proxy manager that bridges the repository configuration classes to the proxy API. This
+ * class is not thread safe (due to the request handler being a non-thread safe requirement).
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo we should be able to configure "views" that sit in front of this (ie, prefix = /legacy, appears as layout maven-1.x, path gets translated before being passed on)
+ * @plexus.component instantiation-strategy="per-lookup"
+ */
+public class DefaultProxyManager
+ implements ProxyManager
+{
+ /**
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ /**
+ * @plexus.requirement role="org.apache.maven.archiva.proxy.ProxyRequestHandler"
+ * @todo seems to be a bug in qdox that the role above is required
+ */
+ private ProxyRequestHandler requestHandler;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfiguredRepositoryFactory repositoryFactory;
+
+ /**
+ * The proxy groups for each managed repository.
+ */
+ private static Map/*<String,ProxiedRepositoryGroup>*/ proxyGroups;
+
+ /**
+ * The default proxy group/managed repository.
+ */
+ private static ProxiedRepositoryGroup defaultProxyGroup;
+
+ public File get( String path )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ assert path.startsWith( "/" );
+
+ Map groups = getProxyGroups();
+
+ ProxiedRepositoryGroup proxyGroup = parseRepositoryId( path, groups );
+
+ String repositoryPath = path;
+ if ( proxyGroup == null )
+ {
+ if ( defaultProxyGroup != null )
+ {
+ proxyGroup = defaultProxyGroup;
+ }
+ else
+ {
+ throw new ResourceDoesNotExistException( "No repositories exist under the path: " + path );
+ }
+ }
+ else
+ {
+ repositoryPath = repositoryPath.substring( proxyGroup.getManagedRepository().getId().length() + 2 );
+ }
+
+ return requestHandler.get( repositoryPath, proxyGroup.getProxiedRepositories(),
+ proxyGroup.getManagedRepository(), proxyGroup.getWagonProxy() );
+ }
+
+ public File getAlways( String path )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ assert path.startsWith( "/" );
+
+ Map groups = getProxyGroups();
+
+ ProxiedRepositoryGroup proxyGroup = parseRepositoryId( path, groups );
+
+ String repositoryPath = path;
+ if ( proxyGroup == null )
+ {
+ if ( defaultProxyGroup != null )
+ {
+ proxyGroup = defaultProxyGroup;
+ }
+ else
+ {
+ throw new ResourceDoesNotExistException( "No repositories exist under the path: " + path );
+ }
+ }
+ else
+ {
+ repositoryPath = repositoryPath.substring( proxyGroup.getManagedRepository().getId().length() + 2 );
+ }
+
+ return requestHandler.getAlways( repositoryPath, proxyGroup.getProxiedRepositories(),
+ proxyGroup.getManagedRepository(), proxyGroup.getWagonProxy() );
+ }
+
+ private Configuration getConfiguration()
+ throws ProxyException
+ {
+ Configuration configuration;
+ try
+ {
+ configuration = configurationStore.getConfigurationFromStore();
+ }
+ catch ( ConfigurationStoreException e )
+ {
+ throw new ProxyException( "Error reading configuration, unable to proxy any requests: " + e.getMessage(),
+ e );
+ }
+ return configuration;
+ }
+
+ private Map getProxyGroups()
+ throws ProxyException
+ {
+ if ( proxyGroups == null )
+ {
+ Map groups = new HashMap();
+
+ Configuration configuration = getConfiguration();
+
+ ProxyInfo wagonProxy = createWagonProxy( configuration.getProxy() );
+
+ for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
+ {
+ RepositoryConfiguration repository = (RepositoryConfiguration) i.next();
+ ArtifactRepository managedRepository = repositoryFactory.createRepository( repository );
+ List proxiedRepositories = getProxiedRepositoriesForManagedRepository(
+ configuration.getProxiedRepositories(), repository.getId() );
+
+ groups.put( repository.getId(),
+ new ProxiedRepositoryGroup( proxiedRepositories, managedRepository, wagonProxy ) );
+ }
+
+ // TODO: ability to configure default proxy separately
+
+ if ( groups.size() == 1 )
+ {
+ defaultProxyGroup = (ProxiedRepositoryGroup) groups.values().iterator().next();
+ }
+
+ proxyGroups = groups;
+ }
+ return proxyGroups;
+ }
+
+ private List getProxiedRepositoriesForManagedRepository( List proxiedRepositories, String id )
+ {
+ List repositories = new ArrayList();
+ for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
+ {
+ ProxiedRepositoryConfiguration config = (ProxiedRepositoryConfiguration) i.next();
+
+ if ( config.getManagedRepository().equals( id ) )
+ {
+ repositories.add( repositoryFactory.createProxiedRepository( config ) );
+ }
+ }
+ return repositories;
+ }
+
+ private static ProxiedRepositoryGroup parseRepositoryId( String path, Map groups )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ ProxiedRepositoryGroup group = null;
+
+ for ( Iterator i = groups.entrySet().iterator(); i.hasNext() && group == null; )
+ {
+ Map.Entry entry = (Map.Entry) i.next();
+
+ if ( path.startsWith( "/" + entry.getKey() + "/" ) )
+ {
+ group = (ProxiedRepositoryGroup) entry.getValue();
+ }
+ }
+
+ return group;
+ }
+
+ private static ProxyInfo createWagonProxy( Proxy proxy )
+ {
+ ProxyInfo proxyInfo = null;
+ if ( proxy != null && !StringUtils.isEmpty( proxy.getHost() ) )
+ {
+ proxyInfo = new ProxyInfo();
+ proxyInfo.setHost( proxy.getHost() );
+ proxyInfo.setPort( proxy.getPort() );
+ proxyInfo.setUserName( proxy.getUsername() );
+ proxyInfo.setPassword( proxy.getPassword() );
+ proxyInfo.setNonProxyHosts( proxy.getNonProxyHosts() );
+ proxyInfo.setType( proxy.getProtocol() );
+ }
+ return proxyInfo;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+
+import java.util.List;
+
+/**
+ * A set of information to store for a group of proxies.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class ProxiedRepositoryGroup
+{
+
+ /**
+ * The locally managed repository that caches proxied artifacts.
+ */
+ private ArtifactRepository managedRepository;
+
+ /**
+ * The remote repositories that are being proxied.
+ */
+ private List/*<ArtifactRepository>*/ proxiedRepositories;
+
+ /**
+ * A wagon proxy to communicate to the proxy repository over a proxy (eg, http proxy)... TerminologyOverflowException
+ */
+ private final ProxyInfo wagonProxy;
+
+ /**
+ * Constructor.
+ *
+ * @param proxiedRepositories the proxied repository
+ * @param managedRepository the locally managed repository
+ * @param wagonProxy the network proxy to use
+ */
+ public ProxiedRepositoryGroup( List/*<ArtifactRepository>*/ proxiedRepositories,
+ ArtifactRepository managedRepository, ProxyInfo wagonProxy )
+ {
+ this.proxiedRepositories = proxiedRepositories;
+
+ this.managedRepository = managedRepository;
+
+ this.wagonProxy = wagonProxy;
+ }
+
+ /**
+ * Constructor.
+ *
+ * @param proxiedRepositories the proxied repository
+ * @param managedRepository the locally managed repository
+ */
+ public ProxiedRepositoryGroup( List/*<ArtifactRepository>*/ proxiedRepositories,
+ ArtifactRepository managedRepository )
+ {
+ this( proxiedRepositories, managedRepository, null );
+ }
+
+ public ArtifactRepository getManagedRepository()
+ {
+ return managedRepository;
+ }
+
+ public List getProxiedRepositories()
+ {
+ return proxiedRepositories;
+ }
+
+ public ProxyInfo getWagonProxy()
+ {
+ return wagonProxy;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+
+import java.io.File;
+
+/**
+ * Repository proxying component. This component will take requests for a given path within a managed repository
+ * and if it is not found or expired, will look in the specified proxy repositories.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface ProxyManager
+{
+ /**
+ * The Plexus role for the component.
+ */
+ String ROLE = ProxyManager.class.getName();
+
+ /**
+ * Used to retrieve a cached path or retrieve one if the cache does not contain it yet.
+ *
+ * @param path the expected repository path
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File get( String path )
+ throws ProxyException, ResourceDoesNotExistException;
+
+ /**
+ * Used to force remote download of the requested path from any of the configured repositories. This method will
+ * only bypass the cache for searching but the requested path will still be cached.
+ *
+ * @param path the expected repository path
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws ResourceDoesNotExistException when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File getAlways( String path )
+ throws ProxyException, ResourceDoesNotExistException;
+}
--- /dev/null
+package org.apache.maven.archiva.scheduler;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationChangeException;
+import org.apache.maven.archiva.configuration.ConfigurationChangeListener;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.InvalidConfigurationException;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
+import org.codehaus.plexus.scheduler.AbstractJob;
+import org.codehaus.plexus.scheduler.Scheduler;
+import org.quartz.CronTrigger;
+import org.quartz.JobDataMap;
+import org.quartz.JobDetail;
+import org.quartz.SchedulerException;
+
+import java.text.ParseException;
+
+/**
+ * Default implementation of a scheduling component for the application.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo should we use plexus-taskqueue instead of or in addition to this?
+ * @plexus.component role="org.apache.maven.archiva.scheduler.RepositoryTaskScheduler"
+ */
+public class DefaultRepositoryTaskScheduler
+ extends AbstractLogEnabled
+ implements RepositoryTaskScheduler, Startable, ConfigurationChangeListener
+{
+ /**
+ * Plexus scheduler that runs the Quartz jobs.
+ *
+ * @plexus.requirement
+ */
+ private Scheduler scheduler;
+
+ /**
+ * Store from which the application configuration is read.
+ *
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ // Quartz group name under which the discovery-related jobs are registered
+ private static final String DISCOVERER_GROUP = "DISCOVERER";
+
+ // Quartz job name of the indexer task within DISCOVERER_GROUP
+ private static final String INDEXER_JOB = "indexerTask";
+
+ /**
+ * The task that performs repository discovery and indexing.
+ *
+ * @plexus.requirement role-hint="indexer"
+ */
+ private RepositoryTask indexerTask;
+
+ /**
+ * Component start: reads the configuration, registers this component for configuration
+ * change notifications, then schedules the jobs the configuration describes.
+ *
+ * @throws StartingException if the configuration cannot be read, its cron expression is
+ * invalid, or the scheduler rejects the job
+ */
+ public void start()
+ throws StartingException
+ {
+ Configuration configuration;
+ try
+ {
+ configuration = configurationStore.getConfigurationFromStore();
+ configurationStore.addChangeListener( this );
+ }
+ catch ( ConfigurationStoreException e )
+ {
+ throw new StartingException( "Unable to read configuration from the store", e );
+ }
+
+ try
+ {
+ scheduleJobs( configuration );
+ }
+ catch ( ParseException e )
+ {
+ throw new StartingException( "Invalid configuration: " + configuration.getIndexerCronExpression(), e );
+ }
+ catch ( SchedulerException e )
+ {
+ throw new StartingException( "Unable to start scheduler: " + e.getMessage(), e );
+ }
+ }
+
+ /**
+ * Registers the indexer job with the scheduler under the configured cron expression, and
+ * immediately runs it once if its target does not exist yet.
+ *
+ * @param configuration the current application configuration
+ * @throws ParseException if the indexer cron expression cannot be parsed
+ * @throws SchedulerException if the job cannot be registered with Quartz
+ */
+ private void scheduleJobs( Configuration configuration )
+ throws ParseException, SchedulerException
+ {
+ // only schedule the indexer when an index path has been configured
+ if ( configuration.getIndexPath() != null )
+ {
+ JobDetail jobDetail = new JobDetail( INDEXER_JOB, DISCOVERER_GROUP, RepositoryTaskJob.class );
+ JobDataMap dataMap = new JobDataMap();
+ dataMap.put( AbstractJob.LOGGER, getLogger() );
+ dataMap.put( RepositoryTaskJob.TASK_KEY, indexerTask );
+ jobDetail.setJobDataMap( dataMap );
+
+ getLogger().info( "Scheduling indexer: " + configuration.getIndexerCronExpression() );
+ CronTrigger trigger =
+ new CronTrigger( INDEXER_JOB + "Trigger", DISCOVERER_GROUP, configuration.getIndexerCronExpression() );
+ scheduler.scheduleJob( jobDetail, trigger );
+
+ // TODO: run as a job so it doesn't block startup/configuration saving
+ try
+ {
+ indexerTask.executeNowIfNeeded();
+ }
+ catch ( TaskExecutionException e )
+ {
+ // deliberate best-effort: a failed first run must not prevent startup
+ getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
+ }
+ }
+ else
+ {
+ getLogger().info( "Not scheduling indexer - index path is not configured" );
+ }
+
+ // TODO: wire in the converter
+ }
+
+ /**
+ * Component stop: removes the indexer job from the scheduler.
+ *
+ * @throws StoppingException if the scheduler fails to unschedule the job
+ */
+ public void stop()
+ throws StoppingException
+ {
+ try
+ {
+ scheduler.unscheduleJob( INDEXER_JOB, DISCOVERER_GROUP );
+ }
+ catch ( SchedulerException e )
+ {
+ throw new StoppingException( "Unable to unschedule tasks", e );
+ }
+ }
+
+ /**
+ * Reacts to a configuration change by unscheduling the current jobs and rescheduling
+ * them from the new configuration.
+ *
+ * @param configuration the new configuration
+ * @throws InvalidConfigurationException if the new cron expression cannot be parsed
+ * @throws ConfigurationChangeException if jobs cannot be unscheduled or rescheduled
+ */
+ public void notifyOfConfigurationChange( Configuration configuration )
+ throws InvalidConfigurationException, ConfigurationChangeException
+ {
+ try
+ {
+ stop();
+
+ scheduleJobs( configuration );
+ }
+ catch ( StoppingException e )
+ {
+ throw new ConfigurationChangeException( "Unable to unschedule previous tasks", e );
+ }
+ catch ( ParseException e )
+ {
+ throw new InvalidConfigurationException( "indexerCronExpression", "Invalid cron expression", e );
+ }
+ catch ( SchedulerException e )
+ {
+ throw new ConfigurationChangeException( "Unable to schedule new tasks", e );
+ }
+ }
+
+ /**
+ * Runs the indexer task immediately, outside its normal schedule.
+ *
+ * @throws TaskExecutionException if the indexing task fails
+ */
+ public void runIndexer()
+ throws TaskExecutionException
+ {
+ indexerTask.execute();
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.scheduler;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.discovery.ArtifactDiscoverer;
+import org.apache.maven.archiva.discovery.DiscovererException;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Task for discovering changes in the repository.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component role="org.apache.maven.archiva.scheduler.RepositoryTask" role-hint="indexer"
+ */
+public class IndexerTask
+ extends AbstractLogEnabled
+ implements RepositoryTask
+{
+ /**
+ * Configuration store.
+ *
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryArtifactIndexFactory indexFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfiguredRepositoryFactory repoFactory;
+
+ /**
+ * @plexus.requirement role="org.apache.maven.archiva.discovery.ArtifactDiscoverer"
+ */
+ private Map artifactDiscoverers;
+
+ /**
+ * @plexus.requirement role-hint="standard"
+ */
+ private RepositoryIndexRecordFactory recordFactory;
+
+ public void execute()
+ throws TaskExecutionException
+ {
+ Configuration configuration;
+ try
+ {
+ configuration = configurationStore.getConfigurationFromStore();
+ }
+ catch ( ConfigurationStoreException e )
+ {
+ throw new TaskExecutionException( e.getMessage(), e );
+ }
+
+ File indexPath = new File( configuration.getIndexPath() );
+
+ execute( configuration, indexPath );
+ }
+
+ private void execute( Configuration configuration, File indexPath )
+ throws TaskExecutionException
+ {
+ long time = System.currentTimeMillis();
+ getLogger().info( "Starting repository discovery process" );
+
+ try
+ {
+ for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
+ {
+ RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
+
+ if ( repositoryConfiguration.isIndexed() )
+ {
+ // TODO! include global ones
+ String blacklistedPatterns = repositoryConfiguration.getBlackListPatterns();
+ boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots();
+
+ ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
+
+ String layoutProperty = repositoryConfiguration.getLayout();
+ ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
+ List artifacts =
+ discoverer.discoverArtifacts( repository, "indexer", blacklistedPatterns, includeSnapshots );
+ if ( !artifacts.isEmpty() )
+ {
+ getLogger().info( "Indexing " + artifacts.size() + " new artifacts" );
+ indexArtifacts( artifacts, indexPath );
+ }
+ }
+ }
+ }
+ catch ( RepositoryIndexException e )
+ {
+ throw new TaskExecutionException( e.getMessage(), e );
+ }
+ catch ( DiscovererException e )
+ {
+ throw new TaskExecutionException( e.getMessage(), e );
+ }
+
+ time = System.currentTimeMillis() - time;
+ getLogger().info( "Finished repository indexing process in " + time + "ms" );
+ }
+
+ public void executeNowIfNeeded()
+ throws TaskExecutionException
+ {
+ Configuration configuration;
+ try
+ {
+ configuration = configurationStore.getConfigurationFromStore();
+ }
+ catch ( ConfigurationStoreException e )
+ {
+ throw new TaskExecutionException( e.getMessage(), e );
+ }
+
+ File indexPath = new File( configuration.getIndexPath() );
+
+ try
+ {
+ RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
+ if ( !artifactIndex.exists() )
+ {
+ execute( configuration, indexPath );
+ }
+ }
+ catch ( RepositoryIndexException e )
+ {
+ throw new TaskExecutionException( e.getMessage(), e );
+ }
+ }
+
+ private void indexArtifacts( List artifacts, File indexPath )
+ throws RepositoryIndexException
+ {
+ List records = new ArrayList();
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ records.add( recordFactory.createRecord( a ) );
+ }
+
+ RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
+ artifactIndex.indexRecords( records );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.scheduler;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A repository task.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryTask
+{
+ /**
+ * Execute the task.
+ *
+ * @throws TaskExecutionException if the task fails
+ */
+ void execute()
+ throws TaskExecutionException;
+
+ /**
+ * Execute the task now if needed because the target doesn't exist.
+ *
+ * @throws TaskExecutionException if the task fails
+ */
+ void executeNowIfNeeded()
+ throws TaskExecutionException;
+}
--- /dev/null
+package org.apache.maven.archiva.scheduler;\r
+\r
+/*\r
+ * Copyright 2005-2006 The Apache Software Foundation.\r
+ *\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ * http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ */\r
+\r
+import org.codehaus.plexus.scheduler.AbstractJob;\r
+import org.quartz.JobDataMap;\r
+import org.quartz.JobExecutionContext;\r
+import org.quartz.JobExecutionException;\r
+\r
+/**\r
+ * This class is the discoverer job that is executed by the scheduler.\r
+ */\r
+public class RepositoryTaskJob\r
+ extends AbstractJob\r
+{\r
+ /**\r
+ * Key under which the RepositoryTask to run is stored in the job data map.\r
+ */\r
+ static final String TASK_KEY = "EXECUTION";\r
+\r
+ /**\r
+ * Execute the RepositoryTask stored in the job data map under {@link #TASK_KEY}.\r
+ *\r
+ * @param context the Quartz execution context carrying the job data map\r
+ * @throws org.quartz.JobExecutionException\r
+ * if the underlying task fails\r
+ */\r
+ public void execute( JobExecutionContext context )\r
+ throws JobExecutionException\r
+ {\r
+ JobDataMap dataMap = context.getJobDetail().getJobDataMap();\r
+ setJobDataMap( dataMap );\r
+\r
+ RepositoryTask executor = (RepositoryTask) dataMap.get( TASK_KEY );\r
+ try\r
+ {\r
+ executor.execute();\r
+ }\r
+ catch ( TaskExecutionException e )\r
+ {\r
+ // wrap so Quartz receives the exception type it knows how to handle\r
+ throw new JobExecutionException( e );\r
+ }\r
+ }\r
+\r
+}\r
--- /dev/null
+package org.apache.maven.archiva.scheduler;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The component that takes care of scheduling in the application.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryTaskScheduler
+{
+ /**
+ * The Plexus component role.
+ */
+ String ROLE = RepositoryTaskScheduler.class.getName();
+
+ /**
+ * Run the indexer task immediately, outside of its normal schedule.
+ *
+ * @throws TaskExecutionException if the indexing task fails
+ */
+ void runIndexer()
+ throws TaskExecutionException;
+}
--- /dev/null
+package org.apache.maven.archiva.scheduler;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception occurring during task execution.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class TaskExecutionException
+ extends Exception
+{
+ /**
+ * @param message description of the failure
+ * @param t the underlying cause, passed on so the original stack trace is preserved
+ */
+ public TaskExecutionException( String message, Throwable t )
+ {
+ super( message, t );
+ }
+}
+++ /dev/null
-package org.apache.maven.repository;
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.repository.converter.RepositoryConversionException;
-import org.apache.maven.repository.converter.RepositoryConverter;
-import org.apache.maven.repository.discovery.ArtifactDiscoverer;
-import org.apache.maven.repository.discovery.DiscovererException;
-import org.apache.maven.repository.reporting.ArtifactReporter;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.List;
-
-/**
- * @author Jason van Zyl
- * @plexus.component
- */
-public class DefaultRepositoryManager
- implements RepositoryManager
-{
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private ArtifactDiscoverer artifactDiscoverer;
-
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private ArtifactRepositoryLayout legacyLayout;
-
- /**
- * @plexus.requirement role-hint="default"
- */
- private ArtifactRepositoryLayout defaultLayout;
-
- /**
- * @plexus.requirement
- */
- private ArtifactRepositoryFactory artifactRepositoryFactory;
-
- /**
- * @plexus.requirement
- */
- private RepositoryConverter repositoryConverter;
-
- /**
- * @plexus.requirement role-hint="default"
- */
- private ArtifactReporter reporter;
-
- public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
- boolean includeSnapshots )
- throws RepositoryConversionException, DiscovererException
- {
- ArtifactRepository legacyRepository;
-
- ArtifactRepository repository;
-
- try
- {
- legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy",
- legacyRepositoryDirectory.toURI().toURL().toString(),
- legacyLayout, null, null );
-
- repository = artifactRepositoryFactory.createArtifactRepository( "default",
- repositoryDirectory.toURI().toURL().toString(),
- defaultLayout, null, null );
- }
- catch ( MalformedURLException e )
- {
- throw new RepositoryConversionException( "Error convering legacy repository.", e );
- }
-
- List legacyArtifacts =
- artifactDiscoverer.discoverArtifacts( legacyRepository, "converter", null, includeSnapshots );
-
- repositoryConverter.convert( legacyArtifacts, repository, reporter );
- }
-}
+++ /dev/null
-package org.apache.maven.repository;
-
-import org.apache.maven.repository.converter.RepositoryConversionException;
-import org.apache.maven.repository.discovery.DiscovererException;
-
-import java.io.File;
-
-/**
- * @author Jason van Zyl
- */
-public interface RepositoryManager
-{
- /**
- * Role of the Repository Manager
- */
- String ROLE = RepositoryManager.class.getName();
-
- /**
- * Convert a legacy repository to a modern repository. This means a Maven 1.x repository
- * using v3 POMs to a Maven 2.x repository using v4.0.0 POMs.
- *
- * @param legacyRepositoryDirectory
- * @param repositoryDirectory
- * @throws RepositoryConversionException
- */
- void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory, boolean includeSnapshots )
- throws RepositoryConversionException, DiscovererException;
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.repository.proxy.ProxiedArtifactRepository;
-
-import java.util.List;
-
-/**
- * Create an artifact repository from the given configuration.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface ConfiguredRepositoryFactory
-{
- String ROLE = ConfiguredRepositoryFactory.class.getName();
-
- /**
- * Create an artifact repository from the given configuration.
- *
- * @param configuration the configuration
- * @return the artifact repository
- */
- ArtifactRepository createRepository( RepositoryConfiguration configuration );
-
- /**
- * Create artifact repositories from the given configuration.
- *
- * @param configuration the configuration containing the repositories
- * @return the artifact repositories
- */
- List createRepositories( Configuration configuration );
-
- /**
- * Create a local repository from the given configuration.
- *
- * @param configuration the configuration
- * @return the local artifact repository
- */
- ArtifactRepository createLocalRepository( Configuration configuration );
-
- /**
- * Create an artifact repository from the given proxy repository configuration.
- *
- * @param configuration the configuration
- * @return the artifact repository
- */
- ProxiedArtifactRepository createProxiedRepository( ProxiedRepositoryConfiguration configuration );
-
- /**
- * Create artifact repositories from the given proxy repository configurations.
- *
- * @param configuration the configuration containing the repositories
- * @return the artifact repositories
- */
- List createProxiedRepositories( Configuration configuration );
-}
+++ /dev/null
-package org.apache.maven.repository.configuration;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.repository.proxy.ProxiedArtifactRepository;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Create artifact repositories from a configuration.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.repository.configuration.ConfiguredRepositoryFactory"
- */
-public class DefaultConfiguredRepositoryFactory
- implements ConfiguredRepositoryFactory
-{
- /**
- * @plexus.requirement role="org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout"
- */
- private Map repositoryLayouts;
-
- /**
- * @plexus.requirement
- */
- private ArtifactRepositoryFactory repoFactory;
-
- public ArtifactRepository createRepository( RepositoryConfiguration configuration )
- {
- File repositoryDirectory = new File( configuration.getDirectory() );
- String repoDir = repositoryDirectory.toURI().toString();
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) repositoryLayouts.get( configuration.getLayout() );
- return repoFactory.createArtifactRepository( configuration.getId(), repoDir, layout, null, null );
- }
-
- public ProxiedArtifactRepository createProxiedRepository( ProxiedRepositoryConfiguration configuration )
- {
- boolean enabled = isEnabled( configuration.getSnapshotsPolicy() );
- String updatePolicy =
- getUpdatePolicy( configuration.getSnapshotsPolicy(), configuration.getSnapshotsInterval() );
- ArtifactRepositoryPolicy snapshotsPolicy =
- new ArtifactRepositoryPolicy( enabled, updatePolicy, ArtifactRepositoryPolicy.CHECKSUM_POLICY_FAIL );
-
- enabled = isEnabled( configuration.getReleasesPolicy() );
- updatePolicy = getUpdatePolicy( configuration.getReleasesPolicy(), configuration.getReleasesInterval() );
- ArtifactRepositoryPolicy releasesPolicy =
- new ArtifactRepositoryPolicy( enabled, updatePolicy, ArtifactRepositoryPolicy.CHECKSUM_POLICY_FAIL );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) repositoryLayouts.get( configuration.getLayout() );
- ArtifactRepository artifactRepository = repoFactory.createArtifactRepository( configuration.getId(),
- configuration.getUrl(), layout,
- snapshotsPolicy, releasesPolicy );
- ProxiedArtifactRepository repository = new ProxiedArtifactRepository( artifactRepository );
- repository.setCacheFailures( configuration.isCacheFailures() );
- repository.setHardFail( configuration.isHardFail() );
- repository.setName( configuration.getName() );
- repository.setUseNetworkProxy( configuration.isUseNetworkProxy() );
- return repository;
- }
-
- public List createRepositories( Configuration configuration )
- {
- List managedRepositories = configuration.getRepositories();
- List repositories = new ArrayList( managedRepositories.size() );
-
- for ( Iterator i = managedRepositories.iterator(); i.hasNext(); )
- {
- repositories.add( createRepository( (RepositoryConfiguration) i.next() ) );
- }
-
- return repositories;
- }
-
- public List createProxiedRepositories( Configuration configuration )
- {
- List proxiedRepositories = configuration.getProxiedRepositories();
- List repositories = new ArrayList( proxiedRepositories.size() );
-
- for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
- {
- repositories.add( createProxiedRepository( (ProxiedRepositoryConfiguration) i.next() ) );
- }
-
- return repositories;
- }
-
- public ArtifactRepository createLocalRepository( Configuration configuration )
- {
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) repositoryLayouts.get( "default" );
- File localRepository = new File( configuration.getLocalRepository() );
- localRepository.mkdirs();
- return repoFactory.createArtifactRepository( "local", localRepository.toURI().toString(), layout, null, null );
- }
-
- private static String getUpdatePolicy( String policy, int interval )
- {
- return "interval".equals( policy ) ? policy + ":" + interval : policy;
- }
-
- private static boolean isEnabled( String policy )
- {
- return !"disabled".equals( policy );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.repository.configuration.ProxiedRepositoryConfiguration;
-import org.apache.maven.repository.configuration.Proxy;
-import org.apache.maven.repository.configuration.RepositoryConfiguration;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.codehaus.plexus.util.StringUtils;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Default implementation of the proxy manager that bridges the repository configuration classes to the proxy API. This
- * class is not thread safe (due to the request handler being a non-thread safe requirement).
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo we should be able to configure "views" that sit in front of this (ie, prefix = /legacy, appears as layout maven-1.x, path gets translated before being passed on)
- * @plexus.component instantiation-strategy="per-lookup"
- */
-public class DefaultProxyManager
- implements ProxyManager
-{
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- /**
- * @plexus.requirement role="org.apache.maven.repository.proxy.ProxyRequestHandler"
- * @todo seems to be a bug in qdox that the role above is required
- */
- private ProxyRequestHandler requestHandler;
-
- /**
- * @plexus.requirement
- */
- private ConfiguredRepositoryFactory repositoryFactory;
-
- /**
- * The proxy groups for each managed repository.
- */
- private static Map/*<String,ProxiedRepositoryGroup>*/ proxyGroups;
-
- /**
- * The default proxy group/managed repository.
- */
- private static ProxiedRepositoryGroup defaultProxyGroup;
-
- public File get( String path )
- throws ProxyException, ResourceDoesNotExistException
- {
- assert path.startsWith( "/" );
-
- Map groups = getProxyGroups();
-
- ProxiedRepositoryGroup proxyGroup = parseRepositoryId( path, groups );
-
- String repositoryPath = path;
- if ( proxyGroup == null )
- {
- if ( defaultProxyGroup != null )
- {
- proxyGroup = defaultProxyGroup;
- }
- else
- {
- throw new ResourceDoesNotExistException( "No repositories exist under the path: " + path );
- }
- }
- else
- {
- repositoryPath = repositoryPath.substring( proxyGroup.getManagedRepository().getId().length() + 2 );
- }
-
- return requestHandler.get( repositoryPath, proxyGroup.getProxiedRepositories(),
- proxyGroup.getManagedRepository(), proxyGroup.getWagonProxy() );
- }
-
- public File getAlways( String path )
- throws ProxyException, ResourceDoesNotExistException
- {
- assert path.startsWith( "/" );
-
- Map groups = getProxyGroups();
-
- ProxiedRepositoryGroup proxyGroup = parseRepositoryId( path, groups );
-
- String repositoryPath = path;
- if ( proxyGroup == null )
- {
- if ( defaultProxyGroup != null )
- {
- proxyGroup = defaultProxyGroup;
- }
- else
- {
- throw new ResourceDoesNotExistException( "No repositories exist under the path: " + path );
- }
- }
- else
- {
- repositoryPath = repositoryPath.substring( proxyGroup.getManagedRepository().getId().length() + 2 );
- }
-
- return requestHandler.getAlways( repositoryPath, proxyGroup.getProxiedRepositories(),
- proxyGroup.getManagedRepository(), proxyGroup.getWagonProxy() );
- }
-
- private Configuration getConfiguration()
- throws ProxyException
- {
- Configuration configuration;
- try
- {
- configuration = configurationStore.getConfigurationFromStore();
- }
- catch ( ConfigurationStoreException e )
- {
- throw new ProxyException( "Error reading configuration, unable to proxy any requests: " + e.getMessage(),
- e );
- }
- return configuration;
- }
-
- private Map getProxyGroups()
- throws ProxyException
- {
- if ( proxyGroups == null )
- {
- Map groups = new HashMap();
-
- Configuration configuration = getConfiguration();
-
- ProxyInfo wagonProxy = createWagonProxy( configuration.getProxy() );
-
- for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
- {
- RepositoryConfiguration repository = (RepositoryConfiguration) i.next();
- ArtifactRepository managedRepository = repositoryFactory.createRepository( repository );
- List proxiedRepositories = getProxiedRepositoriesForManagedRepository(
- configuration.getProxiedRepositories(), repository.getId() );
-
- groups.put( repository.getId(),
- new ProxiedRepositoryGroup( proxiedRepositories, managedRepository, wagonProxy ) );
- }
-
- // TODO: ability to configure default proxy separately
-
- if ( groups.size() == 1 )
- {
- defaultProxyGroup = (ProxiedRepositoryGroup) groups.values().iterator().next();
- }
-
- proxyGroups = groups;
- }
- return proxyGroups;
- }
-
- private List getProxiedRepositoriesForManagedRepository( List proxiedRepositories, String id )
- {
- List repositories = new ArrayList();
- for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
- {
- ProxiedRepositoryConfiguration config = (ProxiedRepositoryConfiguration) i.next();
-
- if ( config.getManagedRepository().equals( id ) )
- {
- repositories.add( repositoryFactory.createProxiedRepository( config ) );
- }
- }
- return repositories;
- }
-
- private static ProxiedRepositoryGroup parseRepositoryId( String path, Map groups )
- throws ProxyException, ResourceDoesNotExistException
- {
- ProxiedRepositoryGroup group = null;
-
- for ( Iterator i = groups.entrySet().iterator(); i.hasNext() && group == null; )
- {
- Map.Entry entry = (Map.Entry) i.next();
-
- if ( path.startsWith( "/" + entry.getKey() + "/" ) )
- {
- group = (ProxiedRepositoryGroup) entry.getValue();
- }
- }
-
- return group;
- }
-
- private static ProxyInfo createWagonProxy( Proxy proxy )
- {
- ProxyInfo proxyInfo = null;
- if ( proxy != null && !StringUtils.isEmpty( proxy.getHost() ) )
- {
- proxyInfo = new ProxyInfo();
- proxyInfo.setHost( proxy.getHost() );
- proxyInfo.setPort( proxy.getPort() );
- proxyInfo.setUserName( proxy.getUsername() );
- proxyInfo.setPassword( proxy.getPassword() );
- proxyInfo.setNonProxyHosts( proxy.getNonProxyHosts() );
- proxyInfo.setType( proxy.getProtocol() );
- }
- return proxyInfo;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-/**
- * A set of information to store for a group of proxies.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ProxiedRepositoryGroup
-{
-
- /**
- * The locally managed repository that caches proxied artifacts.
- */
- private ArtifactRepository managedRepository;
-
- /**
- * The remote repositories that are being proxied.
- */
- private List/*<ArtifactRepository>*/ proxiedRepositories;
-
- /**
- * A wagon proxy to communicate to the proxy repository over a proxy (eg, http proxy)... TerminologyOverflowException
- */
- private final ProxyInfo wagonProxy;
-
- /**
- * Constructor.
- *
- * @param proxiedRepositories the proxied repository
- * @param managedRepository the locally managed repository
- * @param wagonProxy the network proxy to use
- */
- public ProxiedRepositoryGroup( List/*<ArtifactRepository>*/ proxiedRepositories,
- ArtifactRepository managedRepository, ProxyInfo wagonProxy )
- {
- this.proxiedRepositories = proxiedRepositories;
-
- this.managedRepository = managedRepository;
-
- this.wagonProxy = wagonProxy;
- }
-
- /**
- * Constructor.
- *
- * @param proxiedRepositories the proxied repository
- * @param managedRepository the locally managed repository
- */
- public ProxiedRepositoryGroup( List/*<ArtifactRepository>*/ proxiedRepositories,
- ArtifactRepository managedRepository )
- {
- this( proxiedRepositories, managedRepository, null );
- }
-
- public ArtifactRepository getManagedRepository()
- {
- return managedRepository;
- }
-
- public List getProxiedRepositories()
- {
- return proxiedRepositories;
- }
-
- public ProxyInfo getWagonProxy()
- {
- return wagonProxy;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-
-import java.io.File;
-
-/**
- * Repository proxying component. This component will take requests for a given path within a managed repository
- * and if it is not found or expired, will look in the specified proxy repositories.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface ProxyManager
-{
- /** The Plexus role for the component. */
- String ROLE = ProxyManager.class.getName();
-
- /**
- * Used to retrieve a cached path or retrieve one if the cache does not contain it yet.
- *
- * @param path the expected repository path
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException when the requested object can't be found in any of the
- * configured repositories
- */
- File get( String path )
- throws ProxyException, ResourceDoesNotExistException;
-
- /**
- * Used to force remote download of the requested path from any the configured repositories. This method will
- * only bypass the cache for searching but the requested path will still be cached.
- *
- * @param path the expected repository path
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws ResourceDoesNotExistException when the requested object can't be found in any of the
- * configured repositories
- */
- File getAlways( String path )
- throws ProxyException, ResourceDoesNotExistException;
-}
+++ /dev/null
-package org.apache.maven.repository.scheduler;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationChangeException;
-import org.apache.maven.repository.configuration.ConfigurationChangeListener;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.InvalidConfigurationException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
-import org.codehaus.plexus.scheduler.AbstractJob;
-import org.codehaus.plexus.scheduler.Scheduler;
-import org.quartz.CronTrigger;
-import org.quartz.JobDataMap;
-import org.quartz.JobDetail;
-import org.quartz.SchedulerException;
-
-import java.text.ParseException;
-
-/**
- * Default implementation of a scheduling component for the application.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo should we use plexus-taskqueue instead of or in addition to this?
- * @plexus.component role="org.apache.maven.repository.scheduler.RepositoryTaskScheduler"
- */
-public class DefaultRepositoryTaskScheduler
- extends AbstractLogEnabled
- implements RepositoryTaskScheduler, Startable, ConfigurationChangeListener
-{
- /**
- * @plexus.requirement
- */
- private Scheduler scheduler;
-
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- private static final String DISCOVERER_GROUP = "DISCOVERER";
-
- private static final String INDEXER_JOB = "indexerTask";
-
- /**
- * @plexus.requirement role-hint="indexer"
- */
- private RepositoryTask indexerTask;
-
- public void start()
- throws StartingException
- {
- Configuration configuration;
- try
- {
- configuration = configurationStore.getConfigurationFromStore();
- configurationStore.addChangeListener( this );
- }
- catch ( ConfigurationStoreException e )
- {
- throw new StartingException( "Unable to read configuration from the store", e );
- }
-
- try
- {
- scheduleJobs( configuration );
- }
- catch ( ParseException e )
- {
- throw new StartingException( "Invalid configuration: " + configuration.getIndexerCronExpression(), e );
- }
- catch ( SchedulerException e )
- {
- throw new StartingException( "Unable to start scheduler: " + e.getMessage(), e );
- }
- }
-
- private void scheduleJobs( Configuration configuration )
- throws ParseException, SchedulerException
- {
- if ( configuration.getIndexPath() != null )
- {
- JobDetail jobDetail = new JobDetail( INDEXER_JOB, DISCOVERER_GROUP, RepositoryTaskJob.class );
- JobDataMap dataMap = new JobDataMap();
- dataMap.put( AbstractJob.LOGGER, getLogger() );
- dataMap.put( RepositoryTaskJob.TASK_KEY, indexerTask );
- jobDetail.setJobDataMap( dataMap );
-
- getLogger().info( "Scheduling indexer: " + configuration.getIndexerCronExpression() );
- CronTrigger trigger =
- new CronTrigger( INDEXER_JOB + "Trigger", DISCOVERER_GROUP, configuration.getIndexerCronExpression() );
- scheduler.scheduleJob( jobDetail, trigger );
-
- // TODO: run as a job so it doesn't block startup/configuration saving
- try
- {
- indexerTask.executeNowIfNeeded();
- }
- catch ( TaskExecutionException e )
- {
- getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
- }
- }
- else
- {
- getLogger().info( "Not scheduling indexer - index path is not configured" );
- }
-
- // TODO: wire in the converter
- }
-
- public void stop()
- throws StoppingException
- {
- try
- {
- scheduler.unscheduleJob( INDEXER_JOB, DISCOVERER_GROUP );
- }
- catch ( SchedulerException e )
- {
- throw new StoppingException( "Unable to unschedule tasks", e );
- }
- }
-
- public void notifyOfConfigurationChange( Configuration configuration )
- throws InvalidConfigurationException, ConfigurationChangeException
- {
- try
- {
- stop();
-
- scheduleJobs( configuration );
- }
- catch ( StoppingException e )
- {
- throw new ConfigurationChangeException( "Unable to unschedule previous tasks", e );
- }
- catch ( ParseException e )
- {
- throw new InvalidConfigurationException( "indexerCronExpression", "Invalid cron expression", e );
- }
- catch ( SchedulerException e )
- {
- throw new ConfigurationChangeException( "Unable to schedule new tasks", e );
- }
- }
-
- public void runIndexer()
- throws TaskExecutionException
- {
- indexerTask.execute();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.scheduler;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.repository.configuration.RepositoryConfiguration;
-import org.apache.maven.repository.discovery.ArtifactDiscoverer;
-import org.apache.maven.repository.discovery.DiscovererException;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecordFactory;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Task for discovering changes in the repository.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.repository.scheduler.RepositoryTask" role-hint="indexer"
- */
-public class IndexerTask
- extends AbstractLogEnabled
- implements RepositoryTask
-{
- /**
- * Configuration store.
- *
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- /**
- * @plexus.requirement
- */
- private RepositoryArtifactIndexFactory indexFactory;
-
- /**
- * @plexus.requirement
- */
- private ConfiguredRepositoryFactory repoFactory;
-
- /**
- * @plexus.requirement role="org.apache.maven.repository.discovery.ArtifactDiscoverer"
- */
- private Map artifactDiscoverers;
-
- /**
- * @plexus.requirement role-hint="standard"
- */
- private RepositoryIndexRecordFactory recordFactory;
-
- public void execute()
- throws TaskExecutionException
- {
- Configuration configuration;
- try
- {
- configuration = configurationStore.getConfigurationFromStore();
- }
- catch ( ConfigurationStoreException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
-
- File indexPath = new File( configuration.getIndexPath() );
-
- execute( configuration, indexPath );
- }
-
- private void execute( Configuration configuration, File indexPath )
- throws TaskExecutionException
- {
- long time = System.currentTimeMillis();
- getLogger().info( "Starting repository discovery process" );
-
- try
- {
- for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
- {
- RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
- if ( repositoryConfiguration.isIndexed() )
- {
- // TODO! include global ones
- String blacklistedPatterns = repositoryConfiguration.getBlackListPatterns();
- boolean includeSnapshots = repositoryConfiguration.isIncludeSnapshots();
-
- ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
-
- String layoutProperty = repositoryConfiguration.getLayout();
- ArtifactDiscoverer discoverer = (ArtifactDiscoverer) artifactDiscoverers.get( layoutProperty );
- List artifacts =
- discoverer.discoverArtifacts( repository, "indexer", blacklistedPatterns, includeSnapshots );
- if ( !artifacts.isEmpty() )
- {
- getLogger().info( "Indexing " + artifacts.size() + " new artifacts" );
- indexArtifacts( artifacts, indexPath );
- }
- }
- }
- }
- catch ( RepositoryIndexException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- catch ( DiscovererException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
-
- time = System.currentTimeMillis() - time;
- getLogger().info( "Finished repository indexing process in " + time + "ms" );
- }
-
- public void executeNowIfNeeded()
- throws TaskExecutionException
- {
- Configuration configuration;
- try
- {
- configuration = configurationStore.getConfigurationFromStore();
- }
- catch ( ConfigurationStoreException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
-
- File indexPath = new File( configuration.getIndexPath() );
-
- try
- {
- RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
- if ( !artifactIndex.exists() )
- {
- execute( configuration, indexPath );
- }
- }
- catch ( RepositoryIndexException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- }
-
- private void indexArtifacts( List artifacts, File indexPath )
- throws RepositoryIndexException
- {
- List records = new ArrayList();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- records.add( recordFactory.createRecord( a ) );
- }
-
- RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
- artifactIndex.indexRecords( records );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.scheduler;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * A repository task.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryTask
-{
- /**
- * Execute the task.
- */
- void execute()
- throws TaskExecutionException;
-
- /**
- * Execute the task now if needed because the target doesn't exist.
- */
- void executeNowIfNeeded()
- throws TaskExecutionException;
-}
+++ /dev/null
-package org.apache.maven.repository.scheduler;\r
-\r
-/*\r
- * Copyright 2005-2006 The Apache Software Foundation.\r
- *\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- *\r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- */\r
-\r
-import org.codehaus.plexus.scheduler.AbstractJob;\r
-import org.quartz.JobDataMap;\r
-import org.quartz.JobExecutionContext;\r
-import org.quartz.JobExecutionException;\r
-\r
-/**\r
- * This class is the discoverer job that is executed by the scheduler.\r
- */\r
-public class RepositoryTaskJob\r
- extends AbstractJob\r
-{\r
- static final String TASK_KEY = "EXECUTION";\r
-\r
- /**\r
- * Execute the discoverer and the indexer.\r
- *\r
- * @param context\r
- * @throws org.quartz.JobExecutionException\r
- *\r
- */\r
- public void execute( JobExecutionContext context )\r
- throws JobExecutionException\r
- {\r
- JobDataMap dataMap = context.getJobDetail().getJobDataMap();\r
- setJobDataMap( dataMap );\r
-\r
- RepositoryTask executor = (RepositoryTask) dataMap.get( TASK_KEY );\r
- try\r
- {\r
- executor.execute();\r
- }\r
- catch ( TaskExecutionException e )\r
- {\r
- throw new JobExecutionException( e );\r
- }\r
- }\r
-\r
-}\r
+++ /dev/null
-package org.apache.maven.repository.scheduler;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * The component that takes care of scheduling in the application.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryTaskScheduler
-{
- /**
- * The Plexus component role.
- */
- String ROLE = RepositoryTaskScheduler.class.getName();
-
- void runIndexer()
- throws TaskExecutionException;
-}
+++ /dev/null
-package org.apache.maven.repository.scheduler;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Exception occurring during task execution.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class TaskExecutionException
- extends Exception
-{
- public TaskExecutionException( String message, Throwable t )
- {
- super( message, t );
- }
-}
--- /dev/null
+package org.apache.maven.archiva;
+
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+
+/**
+ * @author Jason van Zyl
+ */
+public class RepositoryManagerTest
+ extends PlexusTestCase
+{
+ public void testLegacyRepositoryConversion()
+ throws Exception
+ {
+ File legacyRepositoryDirectory = getTestFile( "src/test/maven-1.x-repository" );
+
+ File repositoryDirectory = getTestFile( "target/maven-2.x-repository" );
+
+ RepositoryManager rm = (RepositoryManager) lookup( RepositoryManager.ROLE );
+
+ rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, true );
+ }
+}
+++ /dev/null
-package org.apache.maven.repository;
-
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-
-/**
- * @author Jason van Zyl
- */
-public class RepositoryManagerTest
- extends PlexusTestCase
-{
- public void testLegacyRepositoryConversion()
- throws Exception
- {
- File legacyRepositoryDirectory = getTestFile( "src/test/maven-1.x-repository" );
-
- File repositoryDirectory = getTestFile( "target/maven-2.x-repository" );
-
- RepositoryManager rm = (RepositoryManager) lookup( RepositoryManager.ROLE );
-
- rm.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, true );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.util.xml.Xpp3Dom;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Base class for artifact discoverers.
+ *
+ * @author John Casey
+ * @author Brett Porter
+ */
+public abstract class AbstractArtifactDiscoverer
+ extends AbstractDiscoverer
+ implements ArtifactDiscoverer
+{
+ /**
+ * Standard patterns to exclude from discovery as they are not artifacts.
+ */
+ private static final String[] STANDARD_DISCOVERY_EXCLUDES = {"bin/**", "reports/**", ".maven/**", "**/*.md5",
+ "**/*.MD5", "**/*.sha1", "**/*.SHA1", "**/*snapshot-version", "*/website/**", "*/licenses/**", "*/licences/**",
+ "**/.htaccess", "**/*.html", "**/*.asc", "**/*.txt", "**/*.xml", "**/README*", "**/CHANGELOG*", "**/KEYS*"};
+
+ private List scanForArtifactPaths( File repositoryBase, String blacklistedPatterns, long comparisonTimestamp )
+ {
+ return scanForArtifactPaths( repositoryBase, blacklistedPatterns, null, STANDARD_DISCOVERY_EXCLUDES,
+ comparisonTimestamp );
+ }
+
+ public List discoverArtifacts( ArtifactRepository repository, String operation, String blacklistedPatterns,
+ boolean includeSnapshots )
+ throws DiscovererException
+ {
+ if ( !"file".equals( repository.getProtocol() ) )
+ {
+ throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
+ }
+
+ Xpp3Dom dom = getLastArtifactDiscoveryDom( readRepositoryMetadataDom( repository ) );
+ long comparisonTimestamp = readComparisonTimestamp( repository, operation, dom );
+
+ // Note that last checked time is deliberately set to the start of the process so that anything added
+ // mid-discovery and missed by the scanner will get checked next time.
+ // Due to this, there must be no negative side-effects of discovering something twice.
+ Date newLastCheckedTime = new Date();
+
+ File repositoryBase = new File( repository.getBasedir() );
+
+ List artifacts = new ArrayList();
+
+ List artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns, comparisonTimestamp );
+
+ // Also note that the last check time, while set at the start, is saved at the end, so that if any exceptions
+ // occur, then the timestamp is not updated so that the discovery is attempted again
+ // TODO: under the list-return behaviour we have now, exceptions might occur later and the timestamp will not be reset - see MRM-83
+ try
+ {
+ setLastCheckedTime( repository, operation, newLastCheckedTime );
+ }
+ catch ( IOException e )
+ {
+ throw new DiscovererException( "Error writing metadata: " + e.getMessage(), e );
+ }
+
+ for ( Iterator i = artifactPaths.iterator(); i.hasNext(); )
+ {
+ String path = (String) i.next();
+
+ try
+ {
+ Artifact artifact = buildArtifactFromPath( path, repository );
+
+ if ( includeSnapshots || !artifact.isSnapshot() )
+ {
+ artifacts.add( artifact );
+ }
+ }
+ catch ( DiscovererException e )
+ {
+ addKickedOutPath( path, e.getMessage() );
+ }
+ }
+
+ return artifacts;
+ }
+
+ /**
+ * Returns an artifact object that is represented by the specified path in a repository
+ *
+ * @param path The path that is pointing to an artifact
+ * @param repository The repository of the artifact
+ * @return Artifact
+ * @throws DiscovererException when the specified path does correspond to an artifact
+ */
+ public Artifact buildArtifactFromPath( String path, ArtifactRepository repository )
+ throws DiscovererException
+ {
+ Artifact artifact = buildArtifact( path );
+
+ if ( artifact != null )
+ {
+ artifact.setRepository( repository );
+ artifact.setFile( new File( repository.getBasedir(), path ) );
+ }
+
+ return artifact;
+ }
+
+ public void setLastCheckedTime( ArtifactRepository repository, String operation, Date date )
+ throws IOException
+ {
+ // see notes in resetLastCheckedTime
+
+ File file = new File( repository.getBasedir(), "maven-metadata.xml" );
+
+ Xpp3Dom dom = readDom( file );
+
+ String dateString = new SimpleDateFormat( DATE_FMT, Locale.US ).format( date );
+
+ setEntry( getLastArtifactDiscoveryDom( dom ), operation, dateString );
+
+ saveDom( file, dom );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.DirectoryScanner;
+import org.codehaus.plexus.util.FileUtils;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.StringUtils;
+import org.codehaus.plexus.util.xml.Xpp3Dom;
+import org.codehaus.plexus.util.xml.Xpp3DomBuilder;
+import org.codehaus.plexus.util.xml.Xpp3DomWriter;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+
/**
 * Base class for the artifact and metadata discoverers. Provides the shared
 * bookkeeping for a repository scan: recording paths that were kicked out or
 * excluded, scanning the repository for candidate files, and reading/writing
 * the per-operation "last discovery" timestamps persisted in the repository's
 * root <code>maven-metadata.xml</code>.
 *
 * @author <a href="mailto:brett@apache.org">Brett Porter</a>
 */
public abstract class AbstractDiscoverer
    extends AbstractLogEnabled
    implements Discoverer
{
    // Paths rejected as invalid during this discoverer's lifetime, with the reason for each
    private List kickedOutPaths = new ArrayList();

    /**
     * @plexus.requirement
     */
    protected ArtifactFactory artifactFactory;

    // Shared zero-length array used when converting the exclusion list to String[]
    private static final String[] EMPTY_STRING_ARRAY = new String[0];

    // Paths skipped because they matched an exclusion or blacklist pattern
    private List excludedPaths = new ArrayList();

    /**
     * Files modified within this many milliseconds of "now" are skipped, on the
     * assumption that they may still be in the middle of being deployed.
     *
     * @plexus.configuration default-value="60000"
     */
    private int blackoutPeriod;

    // Timestamp format for last-discovery entries written to maven-metadata.xml
    protected static final String DATE_FMT = "yyyyMMddHHmmss";

    /**
     * Add a path to the list of files that were kicked out due to being invalid.
     *
     * @param path the path to add
     * @param reason the reason why the path is being kicked out
     */
    protected void addKickedOutPath( String path, String reason )
    {
        kickedOutPaths.add( new DiscovererPath( path, reason ) );
    }

    /**
     * Returns an iterator for the list of DiscovererPaths that were found to not represent a searched object
     *
     * @return Iterator for the DiscovererPath List
     */
    public Iterator getKickedOutPathsIterator()
    {
        return kickedOutPaths.iterator();
    }

    /**
     * Scan the repository for files matching the include patterns, dropping anything
     * that matches the default/explicit/blacklisted exclusions, anything modified
     * within the blackout period, and anything not modified since the comparison
     * timestamp. Excluded files are recorded for later reporting.
     *
     * @param repositoryBase the root directory of the repository
     * @param blacklistedPatterns comma-separated extra exclusion patterns, may be null or empty
     * @param includes the patterns of files to include, or null for the scanner's default
     * @param excludes additional patterns of files to exclude, may be null
     * @param comparisonTimestamp only files modified after this time (ms since epoch) are returned
     * @return the list of matching repository-relative paths (Strings)
     */
    protected List scanForArtifactPaths( File repositoryBase, String blacklistedPatterns, String[] includes,
                                         String[] excludes, long comparisonTimestamp )
    {
        List allExcludes = new ArrayList();
        allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
        if ( excludes != null )
        {
            allExcludes.addAll( Arrays.asList( excludes ) );
        }

        if ( !StringUtils.isEmpty( blacklistedPatterns ) )
        {
            allExcludes.addAll( Arrays.asList( blacklistedPatterns.split( "," ) ) );
        }

        DirectoryScanner scanner = new DirectoryScanner();
        scanner.setBasedir( repositoryBase );
        if ( includes != null )
        {
            scanner.setIncludes( includes );
        }
        scanner.setExcludes( (String[]) allExcludes.toArray( EMPTY_STRING_ARRAY ) );

        // TODO: Correct for extremely large repositories (artifact counts over 200,000 entries)
        scanner.scan();

        // Record every file the scanner excluded so callers can report on them
        for ( Iterator files = Arrays.asList( scanner.getExcludedFiles() ).iterator(); files.hasNext(); )
        {
            String path = files.next().toString();

            excludedPaths.add( new DiscovererPath( path, "Artifact was in the specified list of exclusions" ) );
        }

        // TODO: this could be a part of the scanner
        List includedPaths = new ArrayList();
        for ( Iterator files = Arrays.asList( scanner.getIncludedFiles() ).iterator(); files.hasNext(); )
        {
            String path = files.next().toString();

            long modTime = new File( repositoryBase, path ).lastModified();
            // Skip files still inside the blackout window - they may be mid-deployment
            if ( modTime < System.currentTimeMillis() - blackoutPeriod )
            {
                // Only report files changed since the last recorded discovery
                if ( modTime > comparisonTimestamp )
                {
                    includedPaths.add( path );
                }
            }
        }

        return includedPaths;
    }

    /**
     * Returns an iterator for the list of DiscovererPaths that were not processed because they are explicitly excluded
     *
     * @return Iterator for the DiscovererPath List
     */
    public Iterator getExcludedPathsIterator()
    {
        return excludedPaths.iterator();
    }

    /**
     * Read the timestamp of the last run of the given operation from the
     * last-discovery DOM, returning 0 (the epoch) if absent or unparsable.
     *
     * @param repository the repository being scanned (not read by this method)
     * @param operation the operation whose timestamp entry to read
     * @param dom the last-discovery DOM (see {@link #getLastArtifactDiscoveryDom(Xpp3Dom)})
     * @return the recorded timestamp in milliseconds, or 0 if none was recorded
     */
    protected long readComparisonTimestamp( ArtifactRepository repository, String operation, Xpp3Dom dom )
    {
        Xpp3Dom entry = dom.getChild( operation );
        long comparisonTimestamp = 0;
        if ( entry != null )
        {
            try
            {
                comparisonTimestamp = new SimpleDateFormat( DATE_FMT, Locale.US ).parse( entry.getValue() ).getTime();
            }
            catch ( ParseException e )
            {
                // A bad timestamp just means a full rescan; log and continue with 0
                getLogger().error( "Timestamp was invalid: " + entry.getValue() + "; ignoring" );
            }
        }
        return comparisonTimestamp;
    }

    /**
     * Read the repository tracking metadata file into a DOM, falling back to a fresh,
     * empty <code>&lt;metadata&gt;</code> element if the file is missing or unreadable.
     *
     * NOTE(review): FileReader uses the platform default encoding; XML is normally
     * UTF-8, so non-ASCII content could be misread on some platforms - confirm.
     *
     * @param file the metadata file to read
     * @return the parsed DOM, never null
     */
    protected Xpp3Dom readDom( File file )
    {
        Xpp3Dom dom;
        FileReader fileReader = null;
        try
        {
            fileReader = new FileReader( file );
            dom = Xpp3DomBuilder.build( fileReader );
        }
        catch ( FileNotFoundException e )
        {
            // Safe to ignore
            dom = new Xpp3Dom( "metadata" );
        }
        catch ( XmlPullParserException e )
        {
            getLogger().error( "Error reading metadata (ignoring and recreating): " + e.getMessage() );
            dom = new Xpp3Dom( "metadata" );
        }
        catch ( IOException e )
        {
            getLogger().error( "Error reading metadata (ignoring and recreating): " + e.getMessage() );
            dom = new Xpp3Dom( "metadata" );
        }
        finally
        {
            IOUtil.close( fileReader );
        }
        return dom;
    }

    /**
     * Get the <code>&lt;lastArtifactDiscovery&gt;</code> section of the tracking DOM,
     * creating and attaching an empty one if it does not yet exist.
     *
     * @param dom the root metadata DOM
     * @return the (possibly newly created) section, never null
     */
    protected Xpp3Dom getLastArtifactDiscoveryDom( Xpp3Dom dom )
    {
        Xpp3Dom lastDiscoveryDom = dom.getChild( "lastArtifactDiscovery" );
        if ( lastDiscoveryDom == null )
        {
            dom.addChild( new Xpp3Dom( "lastArtifactDiscovery" ) );
            lastDiscoveryDom = dom.getChild( "lastArtifactDiscovery" );
        }
        return lastDiscoveryDom;
    }

    /**
     * Get the <code>&lt;lastMetadataDiscovery&gt;</code> section of the tracking DOM,
     * creating and attaching an empty one if it does not yet exist.
     *
     * @param dom the root metadata DOM
     * @return the (possibly newly created) section, never null
     */
    protected Xpp3Dom getLastMetadataDiscoveryDom( Xpp3Dom dom )
    {
        Xpp3Dom lastDiscoveryDom = dom.getChild( "lastMetadataDiscovery" );
        if ( lastDiscoveryDom == null )
        {
            dom.addChild( new Xpp3Dom( "lastMetadataDiscovery" ) );
            lastDiscoveryDom = dom.getChild( "lastMetadataDiscovery" );
        }
        return lastDiscoveryDom;
    }

    /**
     * Remove the last-checked timestamps for the given operation from both the
     * artifact and metadata discovery sections, rewriting the tracking file only
     * if at least one entry was actually removed.
     *
     * @param repository the repository whose tracking file to update
     * @param operation the operation whose timestamps should be cleared
     * @throws IOException if the tracking file cannot be written
     */
    public void resetLastCheckedTime( ArtifactRepository repository, String operation )
        throws IOException
    {
        // TODO: get these changes into maven-metadata.xml and migrate towards that. The model is further diverging to a different layout at each level so submodels might be a good idea.
        // TODO: maven-artifact probably needs an improved pathOfMetadata to cope with top level metadata
        // TODO: might we need to write this as maven-metadata-local in some circumstances? merge others? Probably best to keep it simple and just use this format at the root. No need to merge anything that I can see
        // TODO: since this metadata isn't meant to be shared, perhaps another file is called for after all.
        // Format is: <repository><lastDiscovery><KEY>yyyyMMddHHmmss</KEY></lastDiscovery></repository> (ie, flat properties)

        File file = new File( repository.getBasedir(), "maven-metadata.xml" );

        Xpp3Dom dom = readDom( file );

        boolean changed = false;

        if ( removeEntry( getLastArtifactDiscoveryDom( dom ), operation ) )
        {
            changed = true;
        }

        if ( removeEntry( getLastMetadataDiscoveryDom( dom ), operation ) )
        {
            changed = true;
        }

        if ( changed )
        {
            saveDom( file, dom );
        }
    }

    /**
     * Remove every child of the given section whose element name matches the operation.
     *
     * @param lastDiscoveryDom the section to prune
     * @param operation the element name to remove
     * @return true if at least one entry was removed
     */
    private boolean removeEntry( Xpp3Dom lastDiscoveryDom, String operation )
    {
        boolean changed = false;

        // do this in reverse so that removing doesn't affect counter
        Xpp3Dom[] children = lastDiscoveryDom.getChildren();
        for ( int i = lastDiscoveryDom.getChildCount() - 1; i >= 0; i-- )
        {
            if ( children[i].getName().equals( operation ) )
            {
                changed = true;
                lastDiscoveryDom.removeChild( i );
            }
        }
        return changed;
    }

    /**
     * Serialize the DOM to the given file, overwriting any previous content.
     *
     * NOTE(review): FileWriter uses the platform default encoding - see readDom.
     *
     * @param file the destination file
     * @param dom the DOM to write
     * @throws IOException if the file cannot be written
     */
    protected void saveDom( File file, Xpp3Dom dom )
        throws IOException
    {
        FileWriter writer = new FileWriter( file );

        // save metadata
        try
        {
            Xpp3DomWriter.write( writer, dom );
        }
        finally
        {
            IOUtil.close( writer );
        }
    }

    /**
     * Set (creating if necessary) the timestamp entry for the operation in the given section.
     *
     * @param lastDiscoveryDom the section to update
     * @param operation the element name of the entry
     * @param dateString the formatted timestamp value (see {@link #DATE_FMT})
     */
    protected void setEntry( Xpp3Dom lastDiscoveryDom, String operation, String dateString )
    {
        Xpp3Dom entry = lastDiscoveryDom.getChild( operation );
        if ( entry == null )
        {
            entry = new Xpp3Dom( operation );
            lastDiscoveryDom.addChild( entry );
        }
        entry.setValue( dateString );
    }

    /**
     * Read the root tracking metadata DOM for the repository.
     *
     * @param repository the repository whose root maven-metadata.xml to read
     * @return the parsed DOM, or an empty one if the file is missing or unreadable
     */
    protected Xpp3Dom readRepositoryMetadataDom( ArtifactRepository repository )
    {
        return readDom( new File( repository.getBasedir(), "maven-metadata.xml" ) );
    }
}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.List;
+
/**
 * Interface for implementations that can discover artifacts within a repository.
 *
 * @author John Casey
 * @author Brett Porter
 * @todo do we want blacklisted patterns in another form? Part of the object construction?
 * @todo should includeSnapshots be configuration on the component? If not, should the methods be changed to include alternates for both possibilities (discoverReleaseArtifacts, discoverReleaseAndSnapshotArtifacts)?
 * @todo instead of a returned list, should a listener be passed in?
 */
public interface ArtifactDiscoverer
    extends Discoverer
{
    /** Plexus role used to look up implementations of this component. */
    String ROLE = ArtifactDiscoverer.class.getName();

    /**
     * Discover artifacts in the repository. Only artifacts added since the last attempt at discovery will be found.
     * This process guarantees never to miss an artifact, however it is possible that an artifact will be received twice
     * consecutively even if unchanged, so any users of this list must handle such a situation gracefully.
     *
     * @param repository the location of the repository
     * @param operation the operation being used to discover for timestamp checking
     * @param blacklistedPatterns comma-separated patterns listing any files to prevent from being included when scanning
     * @param includeSnapshots whether to discover snapshots
     * @return the list of artifacts discovered
     * @throws DiscovererException if there was an unrecoverable problem discovering artifacts or recording progress
     */
    List discoverArtifacts( ArtifactRepository repository, String operation, String blacklistedPatterns,
                            boolean includeSnapshots )
        throws DiscovererException;

    /**
     * Build an artifact from a path in the repository
     *
     * @param path the repository-relative path
     * @return the artifact represented by the path
     * @throws DiscovererException if the file is not a valid artifact
     * @todo this should be in maven-artifact
     */
    Artifact buildArtifact( String path )
        throws DiscovererException;
}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.StringTokenizer;
+
+/**
+ * Artifact discoverer for the new repository layout (Maven 2.0+).
+ *
+ * @author John Casey
+ * @author Brett Porter
+ * @plexus.component role="org.apache.maven.archiva.discovery.ArtifactDiscoverer" role-hint="default"
+ */
+public class DefaultArtifactDiscoverer
+ extends AbstractArtifactDiscoverer
+{
+ /**
+ * @see org.apache.maven.archiva.discovery.ArtifactDiscoverer#buildArtifact(String)
+ */
+ public Artifact buildArtifact( String path )
+ throws DiscovererException
+ {
+ List pathParts = new ArrayList();
+ StringTokenizer st = new StringTokenizer( path, "/\\" );
+ while ( st.hasMoreTokens() )
+ {
+ pathParts.add( st.nextToken() );
+ }
+
+ Collections.reverse( pathParts );
+
+ Artifact artifact;
+ if ( pathParts.size() >= 4 )
+ {
+ // maven 2.x path
+
+ // the actual artifact filename.
+ String filename = (String) pathParts.remove( 0 );
+
+ // the next one is the version.
+ String version = (String) pathParts.remove( 0 );
+
+ // the next one is the artifactId.
+ String artifactId = (String) pathParts.remove( 0 );
+
+ // the remaining are the groupId.
+ Collections.reverse( pathParts );
+ String groupId = StringUtils.join( pathParts.iterator(), "." );
+
+ String remainingFilename = filename;
+ if ( remainingFilename.startsWith( artifactId + "-" ) )
+ {
+ remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
+
+ String classifier = null;
+
+ // TODO: use artifact handler, share with legacy discoverer
+ String type;
+ if ( remainingFilename.endsWith( ".tar.gz" ) )
+ {
+ type = "distribution-tgz";
+ remainingFilename =
+ remainingFilename.substring( 0, remainingFilename.length() - ".tar.gz".length() );
+ }
+ else if ( remainingFilename.endsWith( ".zip" ) )
+ {
+ type = "distribution-zip";
+ remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
+ }
+ else if ( remainingFilename.endsWith( "-sources.jar" ) )
+ {
+ type = "java-source";
+ classifier = "sources";
+ remainingFilename =
+ remainingFilename.substring( 0, remainingFilename.length() - "-sources.jar".length() );
+ }
+ else
+ {
+ int index = remainingFilename.lastIndexOf( "." );
+ if ( index >= 0 )
+ {
+ type = remainingFilename.substring( index + 1 );
+ remainingFilename = remainingFilename.substring( 0, index );
+ }
+ else
+ {
+ throw new DiscovererException( "Path filename does not have an extension" );
+ }
+ }
+
+ Artifact result;
+ if ( classifier == null )
+ {
+ result =
+ artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
+ }
+ else
+ {
+ result =
+ artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
+ }
+
+ if ( result.isSnapshot() )
+ {
+ // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
+ int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
+ if ( classifierIndex >= 0 )
+ {
+ classifier = remainingFilename.substring( classifierIndex + 1 );
+ remainingFilename = remainingFilename.substring( 0, classifierIndex );
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
+ type, classifier );
+ }
+ else
+ {
+ result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
+ Artifact.SCOPE_RUNTIME, type );
+ }
+
+ // poor encapsulation requires we do this to populate base version
+ if ( !result.isSnapshot() )
+ {
+ throw new DiscovererException( "Failed to create a snapshot artifact: " + result );
+ }
+ else if ( !result.getBaseVersion().equals( version ) )
+ {
+ throw new DiscovererException(
+ "Built snapshot artifact base version does not match path version: " + result +
+ "; should have been version: " + version );
+ }
+ else
+ {
+ artifact = result;
+ }
+ }
+ else if ( !remainingFilename.startsWith( version ) )
+ {
+ throw new DiscovererException( "Built artifact version does not match path version" );
+ }
+ else if ( !remainingFilename.equals( version ) )
+ {
+ if ( remainingFilename.charAt( version.length() ) == '-' )
+ {
+ classifier = remainingFilename.substring( version.length() + 1 );
+ artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifier );
+ }
+ else
+ {
+ throw new DiscovererException( "Path version does not corresspond to an artifact version" );
+ }
+ }
+ else
+ {
+ artifact = result;
+ }
+ }
+ else
+ {
+ throw new DiscovererException( "Path filename does not correspond to an artifact" );
+ }
+ }
+ else
+ {
+ throw new DiscovererException( "Path is too short to build an artifact from" );
+ }
+
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.codehaus.plexus.util.StringUtils;
+import org.codehaus.plexus.util.xml.Xpp3Dom;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.StringTokenizer;
+
+/**
+ * This class gets all the paths that contain the metadata files.
+ *
+ * @plexus.component role="org.apache.maven.archiva.discovery.MetadataDiscoverer" role-hint="default"
+ */
+public class DefaultMetadataDiscoverer
+ extends AbstractDiscoverer
+ implements MetadataDiscoverer
+{
+ /**
+ * Standard patterns to include in discovery of metadata files.
+ *
+ * @todo Note that only the remote format is supported at this time: you cannot search local repository metadata due
+ * to the way it is later loaded in the searchers. Review code using pathOfRemoteMetadata. IS there any value in
+ * searching the local metadata in the first place though?
+ */
+ private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"};
+
+ public List discoverMetadata( ArtifactRepository repository, String operation, String blacklistedPatterns )
+ throws DiscovererException
+ {
+ if ( !"file".equals( repository.getProtocol() ) )
+ {
+ throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
+ }
+
+ Xpp3Dom dom = getLastMetadataDiscoveryDom( readRepositoryMetadataDom( repository ) );
+ long comparisonTimestamp = readComparisonTimestamp( repository, operation, dom );
+
+ // Note that last checked time is deliberately set to the start of the process so that anything added
+ // mid-discovery and missed by the scanner will get checked next time.
+ // Due to this, there must be no negative side-effects of discovering something twice.
+ Date newLastCheckedTime = new Date();
+
+ List metadataFiles = new ArrayList();
+ List metadataPaths = scanForArtifactPaths( new File( repository.getBasedir() ), blacklistedPatterns,
+ STANDARD_DISCOVERY_INCLUDES, null, comparisonTimestamp );
+
+ // Also note that the last check time, while set at the start, is saved at the end, so that if any exceptions
+ // occur, then the timestamp is not updated so that the discovery is attempted again
+ // TODO: under the list-return behaviour we have now, exceptions might occur later and the timestamp will not be reset - see MRM-83
+ try
+ {
+ setLastCheckedTime( repository, operation, newLastCheckedTime );
+ }
+ catch ( IOException e )
+ {
+ throw new DiscovererException( "Error writing metadata: " + e.getMessage(), e );
+ }
+
+ for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
+ {
+ String metadataPath = (String) i.next();
+ try
+ {
+ RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), metadataPath );
+ metadataFiles.add( metadata );
+ }
+ catch ( DiscovererException e )
+ {
+ addKickedOutPath( metadataPath, e.getMessage() );
+ }
+ }
+
+ return metadataFiles;
+ }
+
+ private RepositoryMetadata buildMetadata( String repo, String metadataPath )
+ throws DiscovererException
+ {
+ Metadata m;
+ String repoPath = repo + "/" + metadataPath;
+ try
+ {
+ URL url = new File( repoPath ).toURI().toURL();
+ InputStream is = url.openStream();
+ Reader reader = new InputStreamReader( is );
+ MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
+
+ m = metadataReader.read( reader );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new DiscovererException( "Error parsing metadata file '" + repoPath + "': " + e.getMessage(), e );
+ }
+ catch ( MalformedURLException e )
+ {
+ // shouldn't happen
+ throw new DiscovererException( "Error constructing metadata file '" + repoPath + "': " + e.getMessage(),
+ e );
+ }
+ catch ( IOException e )
+ {
+ throw new DiscovererException( "Error reading metadata file '" + repoPath + "': " + e.getMessage(), e );
+ }
+
+ RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
+
+ if ( repositoryMetadata == null )
+ {
+ throw new DiscovererException( "Unable to build a repository metadata from path" );
+ }
+
+ return repositoryMetadata;
+ }
+
+ /**
+ * Builds a RepositoryMetadata object from a Metadata object and its path.
+ *
+ * @param m Metadata
+ * @param metadataPath path
+ * @return RepositoryMetadata if the parameters represent one; null if not
+ * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc)
+ */
+ private RepositoryMetadata buildMetadata( Metadata m, String metadataPath )
+ {
+ String metaGroupId = m.getGroupId();
+ String metaArtifactId = m.getArtifactId();
+ String metaVersion = m.getVersion();
+
+ // check if the groupId, artifactId and version is in the
+ // metadataPath
+ // parse the path, in reverse order
+ List pathParts = new ArrayList();
+ StringTokenizer st = new StringTokenizer( metadataPath, "/\\" );
+ while ( st.hasMoreTokens() )
+ {
+ pathParts.add( st.nextToken() );
+ }
+
+ Collections.reverse( pathParts );
+ // remove the metadata file
+ pathParts.remove( 0 );
+ Iterator it = pathParts.iterator();
+ String tmpDir = (String) it.next();
+
+ Artifact artifact = null;
+ if ( !StringUtils.isEmpty( metaVersion ) )
+ {
+ artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
+ }
+
+ // snapshotMetadata
+ RepositoryMetadata metadata = null;
+ if ( tmpDir != null && tmpDir.equals( metaVersion ) )
+ {
+ if ( artifact != null )
+ {
+ metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ }
+ }
+ else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
+ {
+ // artifactMetadata
+ if ( artifact != null )
+ {
+ metadata = new ArtifactRepositoryMetadata( artifact );
+ }
+ else
+ {
+ artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
+ metadata = new ArtifactRepositoryMetadata( artifact );
+ }
+ }
+ else
+ {
+ String groupDir = "";
+ int ctr = 0;
+ for ( it = pathParts.iterator(); it.hasNext(); )
+ {
+ String path = (String) it.next();
+ if ( ctr == 0 )
+ {
+ groupDir = path;
+ }
+ else
+ {
+ groupDir = path + "." + groupDir;
+ }
+ ctr++;
+ }
+
+ // groupMetadata
+ if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
+ {
+ metadata = new GroupRepositoryMetadata( metaGroupId );
+ }
+ }
+
+ return metadata;
+ }
+
+ public void setLastCheckedTime( ArtifactRepository repository, String operation, Date date )
+ throws IOException
+ {
+ // see notes in resetLastCheckedTime
+
+ File file = new File( repository.getBasedir(), "maven-metadata.xml" );
+
+ Xpp3Dom dom = readDom( file );
+
+ String dateString = new SimpleDateFormat( DATE_FMT, Locale.US ).format( date );
+
+ setEntry( getLastMetadataDiscoveryDom( dom ), operation, dateString );
+
+ saveDom( file, dom );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.Iterator;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
/**
 * Common interface for all discoverers: exposes the paths rejected or excluded
 * during a scan, and manages the per-operation last-checked timestamps.
 *
 * @author Edwin Punzalan
 */
public interface Discoverer
{
    /**
     * Get the list of paths kicked out during the discovery process.
     *
     * @return Iterator over the recorded DiscovererPath entries.
     */
    Iterator getKickedOutPathsIterator();

    /**
     * Get the list of paths excluded during the discovery process.
     *
     * @return Iterator over the recorded DiscovererPath entries.
     */
    Iterator getExcludedPathsIterator();

    /**
     * Reset the time in the repository that indicates the last time a check was performed.
     *
     * @param repository the location of the repository
     * @param operation the operation to record the timestamp for
     * @throws java.io.IOException if there is a non-recoverable problem reading or writing the metadata
     */
    void resetLastCheckedTime( ArtifactRepository repository, String operation )
        throws IOException;

    /**
     * Set the time in the repository that indicates the last time a check was performed.
     *
     * @param repository the location of the repository
     * @param operation the operation to record the timestamp for
     * @param date the date to set the last check to
     * @throws java.io.IOException if there is a non-recoverable problem reading or writing the metadata
     */
    void setLastCheckedTime( ArtifactRepository repository, String operation, Date date )
        throws IOException;
}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
/**
 * Exception raised when the discovery process encounters an unrecoverable
 * problem, such as an unparsable artifact path or unreadable metadata.
 *
 * @author Edwin Punzalan
 */
public class DiscovererException
    extends Exception
{
    /**
     * Create an exception with an explanatory message.
     *
     * @param message description of the problem
     */
    public DiscovererException( String message )
    {
        super( message );
    }

    /**
     * Create an exception with an explanatory message and an underlying cause.
     *
     * @param message description of the problem
     * @param cause the exception that triggered this one
     */
    public DiscovererException( String message, Throwable cause )
    {
        super( message, cause );
    }
}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
/**
 * Immutable value object pairing a repository path with the reason it was
 * recorded (kicked out or excluded) during discovery.
 *
 * @author Edwin Punzalan
 */
public class DiscovererPath
{
    /** The repository-relative path that was encountered during scanning. */
    private final String discoveredPath;

    /** Human-readable explanation of why the path was recorded. */
    private final String reason;

    /**
     * @param path the repository-relative path
     * @param comment why the path was kicked out or excluded
     */
    public DiscovererPath( String path, String comment )
    {
        discoveredPath = path;
        reason = comment;
    }

    /**
     * @return the recorded path
     */
    public String getPath()
    {
        return discoveredPath;
    }

    /**
     * @return the explanation attached to the path
     */
    public String getComment()
    {
        return reason;
    }
}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.StringTokenizer;
+
+/**
+ * Artifact discoverer for the legacy repository layout (Maven 1.x).
+ * Method used to build an artifact object using a relative path from a repository base directory. An artifactId
+ * having the words "DEV", "PRE", "RC", "ALPHA", "BETA", "DEBUG", "UNOFFICIAL", "CURRENT", "LATEST", "FCS",
+ * "RELEASE", "NIGHTLY", "SNAPSHOT" and "TEST" (not case-sensitive) will most likely make this method fail as
+ * they are reserved for version usage.
+ *
+ * @author John Casey
+ * @author Brett Porter
+ * @plexus.component role="org.apache.maven.archiva.discovery.ArtifactDiscoverer" role-hint="legacy"
+ */
+public class LegacyArtifactDiscoverer
+ extends AbstractArtifactDiscoverer
+{
+ /**
+ * @see org.apache.maven.archiva.discovery.ArtifactDiscoverer#buildArtifact(String)
+ */
+ public Artifact buildArtifact( String path )
+ throws DiscovererException
+ {
+ StringTokenizer tokens = new StringTokenizer( path, "/\\" );
+
+ Artifact result;
+
+ int numberOfTokens = tokens.countTokens();
+
+ if ( numberOfTokens == 3 )
+ {
+ String groupId = tokens.nextToken();
+
+ String type = tokens.nextToken();
+
+ if ( type.endsWith( "s" ) )
+ {
+ type = type.substring( 0, type.length() - 1 );
+
+ // contains artifactId, version, classifier, and extension.
+ String avceGlob = tokens.nextToken();
+
+ //noinspection CollectionDeclaredAsConcreteClass
+ LinkedList avceTokenList = new LinkedList();
+
+ StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
+ while ( avceTokenizer.hasMoreTokens() )
+ {
+ avceTokenList.addLast( avceTokenizer.nextToken() );
+ }
+
+ String lastAvceToken = (String) avceTokenList.removeLast();
+
+ // TODO: share with other discoverer, use artifact handlers instead
+ if ( lastAvceToken.endsWith( ".tar.gz" ) )
+ {
+ type = "distribution-tgz";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else if ( lastAvceToken.endsWith( "sources.jar" ) )
+ {
+ type = "java-source";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else if ( lastAvceToken.endsWith( ".zip" ) )
+ {
+ type = "distribution-zip";
+
+ lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else
+ {
+ int extPos = lastAvceToken.lastIndexOf( '.' );
+
+ if ( extPos > 0 )
+ {
+ String ext = lastAvceToken.substring( extPos + 1 );
+ if ( type.equals( ext ) )
+ {
+ lastAvceToken = lastAvceToken.substring( 0, extPos );
+
+ avceTokenList.addLast( lastAvceToken );
+ }
+ else
+ {
+ throw new DiscovererException( "Path type does not match the extension" );
+ }
+ }
+ else
+ {
+ throw new DiscovererException( "Path filename does not have an extension" );
+ }
+ }
+
+ // let's discover the version, and whatever's leftover will be either
+ // a classifier, or part of the artifactId, depending on position.
+ // Since version is at the end, we have to move in from the back.
+ Collections.reverse( avceTokenList );
+
+ // TODO: this is obscene - surely a better way?
+ String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" +
+ "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" +
+ "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" +
+ "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" +
+ "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" +
+ "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" +
+ "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
+
+ StringBuffer classifierBuffer = new StringBuffer();
+ StringBuffer versionBuffer = new StringBuffer();
+
+ boolean firstVersionTokenEncountered = false;
+ boolean firstToken = true;
+
+ int tokensIterated = 0;
+ for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+ {
+ String token = (String) it.next();
+
+ boolean tokenIsVersionPart = token.matches( validVersionParts );
+
+ StringBuffer bufferToUpdate;
+
+ // NOTE: logic in code is reversed, since we're peeling off the back
+ // Any token after the last versionPart will be in the classifier.
+ // Any token UP TO first non-versionPart is part of the version.
+ if ( !tokenIsVersionPart )
+ {
+ if ( firstVersionTokenEncountered )
+ {
+ //noinspection BreakStatement
+ break;
+ }
+ else
+ {
+ bufferToUpdate = classifierBuffer;
+ }
+ }
+ else
+ {
+ firstVersionTokenEncountered = true;
+
+ bufferToUpdate = versionBuffer;
+ }
+
+ if ( firstToken )
+ {
+ firstToken = false;
+ }
+ else
+ {
+ bufferToUpdate.insert( 0, '-' );
+ }
+
+ bufferToUpdate.insert( 0, token );
+
+ tokensIterated++;
+ }
+
+ // Now, restore the proper ordering so we can build the artifactId.
+ Collections.reverse( avceTokenList );
+
+ // if we didn't find a version, then punt. Use the last token
+ // as the version, and set the classifier empty.
+ if ( versionBuffer.length() < 1 )
+ {
+ if ( avceTokenList.size() > 1 )
+ {
+ int lastIdx = avceTokenList.size() - 1;
+
+ versionBuffer.append( avceTokenList.get( lastIdx ) );
+ avceTokenList.remove( lastIdx );
+ }
+
+ classifierBuffer.setLength( 0 );
+ }
+ else
+ {
+ // if everything is kosher, then pop off all the classifier and
+ // version tokens, leaving the naked artifact id in the list.
+ avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
+ }
+
+ StringBuffer artifactIdBuffer = new StringBuffer();
+
+ firstToken = true;
+ for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
+ {
+ String token = (String) it.next();
+
+ if ( firstToken )
+ {
+ firstToken = false;
+ }
+ else
+ {
+ artifactIdBuffer.append( '-' );
+ }
+
+ artifactIdBuffer.append( token );
+ }
+
+ String artifactId = artifactIdBuffer.toString();
+
+ if ( artifactId.length() > 0 )
+ {
+ int lastVersionCharIdx = versionBuffer.length() - 1;
+ if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
+ {
+ versionBuffer.setLength( lastVersionCharIdx );
+ }
+
+ String version = versionBuffer.toString();
+
+ if ( version.length() > 0 )
+ {
+ if ( classifierBuffer.length() > 0 )
+ {
+ result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
+ classifierBuffer.toString() );
+ }
+ else
+ {
+ result = artifactFactory.createArtifact( groupId, artifactId, version,
+ Artifact.SCOPE_RUNTIME, type );
+ }
+ }
+ else
+ {
+ throw new DiscovererException( "Path filename version is empty" );
+ }
+ }
+ else
+ {
+ throw new DiscovererException( "Path filename artifactId is empty" );
+ }
+ }
+ else
+ {
+ throw new DiscovererException( "Path artifact type does not corresspond to an artifact type" );
+ }
+ }
+ else
+ {
+ throw new DiscovererException( "Path does not match a legacy repository path for an artifact" );
+ }
+
+ return result;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+import java.util.List;
+
+/**
+ * Interface for discovering metadata files (maven-metadata.xml) within a repository.
+ */
+public interface MetadataDiscoverer
+ extends Discoverer
+{
+ // Plexus component role key for looking up implementations of this interface.
+ String ROLE = MetadataDiscoverer.class.getName();
+
+ /**
+ * Search for metadata files in the repository. Only metadata modified since the last
+ * recorded run of the given operation is returned.
+ *
+ * @param repository The repository.
+ * @param operation the operation being performed (used for timestamp comparison)
+ * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
+ * @return the list of metadata files found
+ * @throws DiscovererException if there was an unrecoverable problem discovering metadata or recording progress
+ */
+ List discoverMetadata( ArtifactRepository repository, String operation, String blacklistedPatterns )
+ throws DiscovererException;
+}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.codehaus.plexus.util.xml.Xpp3Dom;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * Base class for artifact discoverers.
- *
- * @author John Casey
- * @author Brett Porter
- */
-public abstract class AbstractArtifactDiscoverer
- extends AbstractDiscoverer
- implements ArtifactDiscoverer
-{
- /**
- * Standard patterns to exclude from discovery as they are not artifacts.
- */
- private static final String[] STANDARD_DISCOVERY_EXCLUDES = {"bin/**", "reports/**", ".maven/**", "**/*.md5",
- "**/*.MD5", "**/*.sha1", "**/*.SHA1", "**/*snapshot-version", "*/website/**", "*/licenses/**", "*/licences/**",
- "**/.htaccess", "**/*.html", "**/*.asc", "**/*.txt", "**/*.xml", "**/README*", "**/CHANGELOG*", "**/KEYS*"};
-
- private List scanForArtifactPaths( File repositoryBase, String blacklistedPatterns, long comparisonTimestamp )
- {
- return scanForArtifactPaths( repositoryBase, blacklistedPatterns, null, STANDARD_DISCOVERY_EXCLUDES,
- comparisonTimestamp );
- }
-
- public List discoverArtifacts( ArtifactRepository repository, String operation, String blacklistedPatterns,
- boolean includeSnapshots )
- throws DiscovererException
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
- }
-
- Xpp3Dom dom = getLastArtifactDiscoveryDom( readRepositoryMetadataDom( repository ) );
- long comparisonTimestamp = readComparisonTimestamp( repository, operation, dom );
-
- // Note that last checked time is deliberately set to the start of the process so that anything added
- // mid-discovery and missed by the scanner will get checked next time.
- // Due to this, there must be no negative side-effects of discovering something twice.
- Date newLastCheckedTime = new Date();
-
- File repositoryBase = new File( repository.getBasedir() );
-
- List artifacts = new ArrayList();
-
- List artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns, comparisonTimestamp );
-
- // Also note that the last check time, while set at the start, is saved at the end, so that if any exceptions
- // occur, then the timestamp is not updated so that the discovery is attempted again
- // TODO: under the list-return behaviour we have now, exceptions might occur later and the timestamp will not be reset - see MRM-83
- try
- {
- setLastCheckedTime( repository, operation, newLastCheckedTime );
- }
- catch ( IOException e )
- {
- throw new DiscovererException( "Error writing metadata: " + e.getMessage(), e );
- }
-
- for ( Iterator i = artifactPaths.iterator(); i.hasNext(); )
- {
- String path = (String) i.next();
-
- try
- {
- Artifact artifact = buildArtifactFromPath( path, repository );
-
- if ( includeSnapshots || !artifact.isSnapshot() )
- {
- artifacts.add( artifact );
- }
- }
- catch ( DiscovererException e )
- {
- addKickedOutPath( path, e.getMessage() );
- }
- }
-
- return artifacts;
- }
-
- /**
- * Returns an artifact object that is represented by the specified path in a repository
- *
- * @param path The path that is pointing to an artifact
- * @param repository The repository of the artifact
- * @return Artifact
- * @throws DiscovererException when the specified path does correspond to an artifact
- */
- public Artifact buildArtifactFromPath( String path, ArtifactRepository repository )
- throws DiscovererException
- {
- Artifact artifact = buildArtifact( path );
-
- if ( artifact != null )
- {
- artifact.setRepository( repository );
- artifact.setFile( new File( repository.getBasedir(), path ) );
- }
-
- return artifact;
- }
-
- public void setLastCheckedTime( ArtifactRepository repository, String operation, Date date )
- throws IOException
- {
- // see notes in resetLastCheckedTime
-
- File file = new File( repository.getBasedir(), "maven-metadata.xml" );
-
- Xpp3Dom dom = readDom( file );
-
- String dateString = new SimpleDateFormat( DATE_FMT, Locale.US ).format( date );
-
- setEntry( getLastArtifactDiscoveryDom( dom ), operation, dateString );
-
- saveDom( file, dom );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.DirectoryScanner;
-import org.codehaus.plexus.util.FileUtils;
-import org.codehaus.plexus.util.IOUtil;
-import org.codehaus.plexus.util.StringUtils;
-import org.codehaus.plexus.util.xml.Xpp3Dom;
-import org.codehaus.plexus.util.xml.Xpp3DomBuilder;
-import org.codehaus.plexus.util.xml.Xpp3DomWriter;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * Base class for the artifact and metadata discoverers.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractDiscoverer
- extends AbstractLogEnabled
- implements Discoverer
-{
- private List kickedOutPaths = new ArrayList();
-
- /**
- * @plexus.requirement
- */
- protected ArtifactFactory artifactFactory;
-
- private static final String[] EMPTY_STRING_ARRAY = new String[0];
-
- private List excludedPaths = new ArrayList();
-
- /**
- * @plexus.configuration default-value="60000"
- */
- private int blackoutPeriod;
-
- protected static final String DATE_FMT = "yyyyMMddHHmmss";
-
- /**
- * Add a path to the list of files that were kicked out due to being invalid.
- *
- * @param path the path to add
- * @param reason the reason why the path is being kicked out
- */
- protected void addKickedOutPath( String path, String reason )
- {
- kickedOutPaths.add( new DiscovererPath( path, reason ) );
- }
-
- /**
- * Returns an iterator for the list if DiscovererPaths that were found to not represent a searched object
- *
- * @return Iterator for the DiscovererPath List
- */
- public Iterator getKickedOutPathsIterator()
- {
- return kickedOutPaths.iterator();
- }
-
- protected List scanForArtifactPaths( File repositoryBase, String blacklistedPatterns, String[] includes,
- String[] excludes, long comparisonTimestamp )
- {
- List allExcludes = new ArrayList();
- allExcludes.addAll( FileUtils.getDefaultExcludesAsList() );
- if ( excludes != null )
- {
- allExcludes.addAll( Arrays.asList( excludes ) );
- }
-
- if ( !StringUtils.isEmpty( blacklistedPatterns ) )
- {
- allExcludes.addAll( Arrays.asList( blacklistedPatterns.split( "," ) ) );
- }
-
- DirectoryScanner scanner = new DirectoryScanner();
- scanner.setBasedir( repositoryBase );
- if ( includes != null )
- {
- scanner.setIncludes( includes );
- }
- scanner.setExcludes( (String[]) allExcludes.toArray( EMPTY_STRING_ARRAY ) );
-
- // TODO: Correct for extremely large repositories (artifact counts over 200,000 entries)
- scanner.scan();
-
- for ( Iterator files = Arrays.asList( scanner.getExcludedFiles() ).iterator(); files.hasNext(); )
- {
- String path = files.next().toString();
-
- excludedPaths.add( new DiscovererPath( path, "Artifact was in the specified list of exclusions" ) );
- }
-
- // TODO: this could be a part of the scanner
- List includedPaths = new ArrayList();
- for ( Iterator files = Arrays.asList( scanner.getIncludedFiles() ).iterator(); files.hasNext(); )
- {
- String path = files.next().toString();
-
- long modTime = new File( repositoryBase, path ).lastModified();
- if ( modTime < System.currentTimeMillis() - blackoutPeriod )
- {
- if ( modTime > comparisonTimestamp )
- {
- includedPaths.add( path );
- }
- }
- }
-
- return includedPaths;
- }
-
- /**
- * Returns an iterator for the list if DiscovererPaths that were not processed because they are explicitly excluded
- *
- * @return Iterator for the DiscovererPath List
- */
- public Iterator getExcludedPathsIterator()
- {
- return excludedPaths.iterator();
- }
-
- protected long readComparisonTimestamp( ArtifactRepository repository, String operation, Xpp3Dom dom )
- {
- Xpp3Dom entry = dom.getChild( operation );
- long comparisonTimestamp = 0;
- if ( entry != null )
- {
- try
- {
- comparisonTimestamp = new SimpleDateFormat( DATE_FMT, Locale.US ).parse( entry.getValue() ).getTime();
- }
- catch ( ParseException e )
- {
- getLogger().error( "Timestamp was invalid: " + entry.getValue() + "; ignoring" );
- }
- }
- return comparisonTimestamp;
- }
-
- protected Xpp3Dom readDom( File file )
- {
- Xpp3Dom dom;
- FileReader fileReader = null;
- try
- {
- fileReader = new FileReader( file );
- dom = Xpp3DomBuilder.build( fileReader );
- }
- catch ( FileNotFoundException e )
- {
- // Safe to ignore
- dom = new Xpp3Dom( "metadata" );
- }
- catch ( XmlPullParserException e )
- {
- getLogger().error( "Error reading metadata (ignoring and recreating): " + e.getMessage() );
- dom = new Xpp3Dom( "metadata" );
- }
- catch ( IOException e )
- {
- getLogger().error( "Error reading metadata (ignoring and recreating): " + e.getMessage() );
- dom = new Xpp3Dom( "metadata" );
- }
- finally
- {
- IOUtil.close( fileReader );
- }
- return dom;
- }
-
- protected Xpp3Dom getLastArtifactDiscoveryDom( Xpp3Dom dom )
- {
- Xpp3Dom lastDiscoveryDom = dom.getChild( "lastArtifactDiscovery" );
- if ( lastDiscoveryDom == null )
- {
- dom.addChild( new Xpp3Dom( "lastArtifactDiscovery" ) );
- lastDiscoveryDom = dom.getChild( "lastArtifactDiscovery" );
- }
- return lastDiscoveryDom;
- }
-
- protected Xpp3Dom getLastMetadataDiscoveryDom( Xpp3Dom dom )
- {
- Xpp3Dom lastDiscoveryDom = dom.getChild( "lastMetadataDiscovery" );
- if ( lastDiscoveryDom == null )
- {
- dom.addChild( new Xpp3Dom( "lastMetadataDiscovery" ) );
- lastDiscoveryDom = dom.getChild( "lastMetadataDiscovery" );
- }
- return lastDiscoveryDom;
- }
-
- public void resetLastCheckedTime( ArtifactRepository repository, String operation )
- throws IOException
- {
- // TODO: get these changes into maven-metadata.xml and migrate towards that. The model is further diverging to a different layout at each level so submodels might be a good idea.
- // TODO: maven-artifact probably needs an improved pathOfMetadata to cope with top level metadata
- // TODO: might we need to write this as maven-metadata-local in some circumstances? merge others? Probably best to keep it simple and just use this format at the root. No need to merge anything that I can see
- // TODO: since this metadata isn't meant to be shared, perhaps another file is called for after all.
- // Format is: <repository><lastDiscovery><KEY>yyyyMMddHHmmss</KEY></lastDiscovery></repository> (ie, flat properties)
-
- File file = new File( repository.getBasedir(), "maven-metadata.xml" );
-
- Xpp3Dom dom = readDom( file );
-
- boolean changed = false;
-
- if ( removeEntry( getLastArtifactDiscoveryDom( dom ), operation ) )
- {
- changed = true;
- }
-
- if ( removeEntry( getLastMetadataDiscoveryDom( dom ), operation ) )
- {
- changed = true;
- }
-
- if ( changed )
- {
- saveDom( file, dom );
- }
- }
-
- private boolean removeEntry( Xpp3Dom lastDiscoveryDom, String operation )
- {
- boolean changed = false;
-
- // do this in reverse so that removing doesn't affect counter
- Xpp3Dom[] children = lastDiscoveryDom.getChildren();
- for ( int i = lastDiscoveryDom.getChildCount() - 1; i >= 0; i-- )
- {
- if ( children[i].getName().equals( operation ) )
- {
- changed = true;
- lastDiscoveryDom.removeChild( i );
- }
- }
- return changed;
- }
-
- protected void saveDom( File file, Xpp3Dom dom )
- throws IOException
- {
- FileWriter writer = new FileWriter( file );
-
- // save metadata
- try
- {
- Xpp3DomWriter.write( writer, dom );
- }
- finally
- {
- IOUtil.close( writer );
- }
- }
-
- protected void setEntry( Xpp3Dom lastDiscoveryDom, String operation, String dateString )
- {
- Xpp3Dom entry = lastDiscoveryDom.getChild( operation );
- if ( entry == null )
- {
- entry = new Xpp3Dom( operation );
- lastDiscoveryDom.addChild( entry );
- }
- entry.setValue( dateString );
- }
-
- protected Xpp3Dom readRepositoryMetadataDom( ArtifactRepository repository )
- {
- return readDom( new File( repository.getBasedir(), "maven-metadata.xml" ) );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-import java.util.List;
-
-/**
- * Interface for implementation that can discover artifacts within a repository.
- *
- * @author John Casey
- * @author Brett Porter
- * @todo do we want blacklisted patterns in another form? Part of the object construction?
- * @todo should includeSnapshots be configuration on the component? If not, should the methods be changed to include alternates for both possibilities (discoverReleaseArtifacts, discoverReleaseAndSnapshotArtifacts)?
- * @todo instead of a returned list, should a listener be passed in?
- */
-public interface ArtifactDiscoverer
- extends Discoverer
-{
- String ROLE = ArtifactDiscoverer.class.getName();
-
- /**
- * Discover artifacts in the repository. Only artifacts added since the last attempt at discovery will be found.
- * This process guarantees never to miss an artifact, however it is possible that an artifact will be received twice
- * consecutively even if unchanged, so any users of this list must handle such a situation gracefully.
- *
- * @param repository the location of the repository
- * @param operation the operation being used to discover for timestamp checking
- * @param blacklistedPatterns pattern that lists any files to prevent from being included when scanning
- * @param includeSnapshots whether to discover snapshots
- * @return the list of artifacts discovered
- * @throws DiscovererException if there was an unrecoverable problem discovering artifacts or recording progress
- */
- List discoverArtifacts( ArtifactRepository repository, String operation, String blacklistedPatterns,
- boolean includeSnapshots )
- throws DiscovererException;
-
- /**
- * Build an artifact from a path in the repository
- *
- * @param path the path
- * @return the artifact
- * @throws DiscovererException if the file is not a valid artifact
- * @todo this should be in maven-artifact
- */
- Artifact buildArtifact( String path )
- throws DiscovererException;
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.util.StringUtils;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the new repository layout (Maven 2.0+).
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.repository.discovery.ArtifactDiscoverer" role-hint="default"
- */
-public class DefaultArtifactDiscoverer
- extends AbstractArtifactDiscoverer
-{
- /**
- * @see org.apache.maven.repository.discovery.ArtifactDiscoverer#buildArtifact(String)
- */
- public Artifact buildArtifact( String path )
- throws DiscovererException
- {
- List pathParts = new ArrayList();
- StringTokenizer st = new StringTokenizer( path, "/\\" );
- while ( st.hasMoreTokens() )
- {
- pathParts.add( st.nextToken() );
- }
-
- Collections.reverse( pathParts );
-
- Artifact artifact;
- if ( pathParts.size() >= 4 )
- {
- // maven 2.x path
-
- // the actual artifact filename.
- String filename = (String) pathParts.remove( 0 );
-
- // the next one is the version.
- String version = (String) pathParts.remove( 0 );
-
- // the next one is the artifactId.
- String artifactId = (String) pathParts.remove( 0 );
-
- // the remaining are the groupId.
- Collections.reverse( pathParts );
- String groupId = StringUtils.join( pathParts.iterator(), "." );
-
- String remainingFilename = filename;
- if ( remainingFilename.startsWith( artifactId + "-" ) )
- {
- remainingFilename = remainingFilename.substring( artifactId.length() + 1 );
-
- String classifier = null;
-
- // TODO: use artifact handler, share with legacy discoverer
- String type;
- if ( remainingFilename.endsWith( ".tar.gz" ) )
- {
- type = "distribution-tgz";
- remainingFilename =
- remainingFilename.substring( 0, remainingFilename.length() - ".tar.gz".length() );
- }
- else if ( remainingFilename.endsWith( ".zip" ) )
- {
- type = "distribution-zip";
- remainingFilename = remainingFilename.substring( 0, remainingFilename.length() - ".zip".length() );
- }
- else if ( remainingFilename.endsWith( "-sources.jar" ) )
- {
- type = "java-source";
- classifier = "sources";
- remainingFilename =
- remainingFilename.substring( 0, remainingFilename.length() - "-sources.jar".length() );
- }
- else
- {
- int index = remainingFilename.lastIndexOf( "." );
- if ( index >= 0 )
- {
- type = remainingFilename.substring( index + 1 );
- remainingFilename = remainingFilename.substring( 0, index );
- }
- else
- {
- throw new DiscovererException( "Path filename does not have an extension" );
- }
- }
-
- Artifact result;
- if ( classifier == null )
- {
- result =
- artifactFactory.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME, type );
- }
- else
- {
- result =
- artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
- }
-
- if ( result.isSnapshot() )
- {
- // version is *-SNAPSHOT, filename is *-yyyyMMdd.hhmmss-b
- int classifierIndex = remainingFilename.indexOf( '-', version.length() + 8 );
- if ( classifierIndex >= 0 )
- {
- classifier = remainingFilename.substring( classifierIndex + 1 );
- remainingFilename = remainingFilename.substring( 0, classifierIndex );
- result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, remainingFilename,
- type, classifier );
- }
- else
- {
- result = artifactFactory.createArtifact( groupId, artifactId, remainingFilename,
- Artifact.SCOPE_RUNTIME, type );
- }
-
- // poor encapsulation requires we do this to populate base version
- if ( !result.isSnapshot() )
- {
- throw new DiscovererException( "Failed to create a snapshot artifact: " + result );
- }
- else if ( !result.getBaseVersion().equals( version ) )
- {
- throw new DiscovererException(
- "Built snapshot artifact base version does not match path version: " + result +
- "; should have been version: " + version );
- }
- else
- {
- artifact = result;
- }
- }
- else if ( !remainingFilename.startsWith( version ) )
- {
- throw new DiscovererException( "Built artifact version does not match path version" );
- }
- else if ( !remainingFilename.equals( version ) )
- {
- if ( remainingFilename.charAt( version.length() ) == '-' )
- {
- classifier = remainingFilename.substring( version.length() + 1 );
- artifact = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, type,
- classifier );
- }
- else
- {
- throw new DiscovererException( "Path version does not corresspond to an artifact version" );
- }
- }
- else
- {
- artifact = result;
- }
- }
- else
- {
- throw new DiscovererException( "Path filename does not correspond to an artifact" );
- }
- }
- else
- {
- throw new DiscovererException( "Path is too short to build an artifact from" );
- }
-
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.codehaus.plexus.util.StringUtils;
-import org.codehaus.plexus.util.xml.Xpp3Dom;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.StringTokenizer;
-
-/**
- * This class gets all the paths that contain the metadata files.
- *
- * @plexus.component role="org.apache.maven.repository.discovery.MetadataDiscoverer" role-hint="default"
- */
-public class DefaultMetadataDiscoverer
- extends AbstractDiscoverer
- implements MetadataDiscoverer
-{
- /**
- * Standard patterns to include in discovery of metadata files.
- *
- * @todo Note that only the remote format is supported at this time: you cannot search local repository metadata due
- * to the way it is later loaded in the searchers. Review code using pathOfRemoteMetadata. IS there any value in
- * searching the local metadata in the first place though?
- */
- private static final String[] STANDARD_DISCOVERY_INCLUDES = {"**/maven-metadata.xml"};
-
- public List discoverMetadata( ArtifactRepository repository, String operation, String blacklistedPatterns )
- throws DiscovererException
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- throw new UnsupportedOperationException( "Only filesystem repositories are supported" );
- }
-
- Xpp3Dom dom = getLastMetadataDiscoveryDom( readRepositoryMetadataDom( repository ) );
- long comparisonTimestamp = readComparisonTimestamp( repository, operation, dom );
-
- // Note that last checked time is deliberately set to the start of the process so that anything added
- // mid-discovery and missed by the scanner will get checked next time.
- // Due to this, there must be no negative side-effects of discovering something twice.
- Date newLastCheckedTime = new Date();
-
- List metadataFiles = new ArrayList();
- List metadataPaths = scanForArtifactPaths( new File( repository.getBasedir() ), blacklistedPatterns,
- STANDARD_DISCOVERY_INCLUDES, null, comparisonTimestamp );
-
- // Also note that the last check time, while set at the start, is saved at the end, so that if any exceptions
- // occur, then the timestamp is not updated so that the discovery is attempted again
- // TODO: under the list-return behaviour we have now, exceptions might occur later and the timestamp will not be reset - see MRM-83
- try
- {
- setLastCheckedTime( repository, operation, newLastCheckedTime );
- }
- catch ( IOException e )
- {
- throw new DiscovererException( "Error writing metadata: " + e.getMessage(), e );
- }
-
- for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
- {
- String metadataPath = (String) i.next();
- try
- {
- RepositoryMetadata metadata = buildMetadata( repository.getBasedir(), metadataPath );
- metadataFiles.add( metadata );
- }
- catch ( DiscovererException e )
- {
- addKickedOutPath( metadataPath, e.getMessage() );
- }
- }
-
- return metadataFiles;
- }
-
- private RepositoryMetadata buildMetadata( String repo, String metadataPath )
- throws DiscovererException
- {
- Metadata m;
- String repoPath = repo + "/" + metadataPath;
- try
- {
- URL url = new File( repoPath ).toURI().toURL();
- InputStream is = url.openStream();
- Reader reader = new InputStreamReader( is );
- MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
-
- m = metadataReader.read( reader );
- }
- catch ( XmlPullParserException e )
- {
- throw new DiscovererException( "Error parsing metadata file '" + repoPath + "': " + e.getMessage(), e );
- }
- catch ( MalformedURLException e )
- {
- // shouldn't happen
- throw new DiscovererException( "Error constructing metadata file '" + repoPath + "': " + e.getMessage(),
- e );
- }
- catch ( IOException e )
- {
- throw new DiscovererException( "Error reading metadata file '" + repoPath + "': " + e.getMessage(), e );
- }
-
- RepositoryMetadata repositoryMetadata = buildMetadata( m, metadataPath );
-
- if ( repositoryMetadata == null )
- {
- throw new DiscovererException( "Unable to build a repository metadata from path" );
- }
-
- return repositoryMetadata;
- }
-
- /**
- * Builds a RepositoryMetadata object from a Metadata object and its path.
- *
- * @param m Metadata
- * @param metadataPath path
- * @return RepositoryMetadata if the parameters represent one; null if not
- * @todo should we just be using the path information, and loading it later when it is needed? (for reporting, etc)
- */
- private RepositoryMetadata buildMetadata( Metadata m, String metadataPath )
- {
- String metaGroupId = m.getGroupId();
- String metaArtifactId = m.getArtifactId();
- String metaVersion = m.getVersion();
-
- // check if the groupId, artifactId and version is in the
- // metadataPath
- // parse the path, in reverse order
- List pathParts = new ArrayList();
- StringTokenizer st = new StringTokenizer( metadataPath, "/\\" );
- while ( st.hasMoreTokens() )
- {
- pathParts.add( st.nextToken() );
- }
-
- Collections.reverse( pathParts );
- // remove the metadata file
- pathParts.remove( 0 );
- Iterator it = pathParts.iterator();
- String tmpDir = (String) it.next();
-
- Artifact artifact = null;
- if ( !StringUtils.isEmpty( metaVersion ) )
- {
- artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, metaVersion );
- }
-
- // snapshotMetadata
- RepositoryMetadata metadata = null;
- if ( tmpDir != null && tmpDir.equals( metaVersion ) )
- {
- if ( artifact != null )
- {
- metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- }
- }
- else if ( tmpDir != null && tmpDir.equals( metaArtifactId ) )
- {
- // artifactMetadata
- if ( artifact != null )
- {
- metadata = new ArtifactRepositoryMetadata( artifact );
- }
- else
- {
- artifact = artifactFactory.createProjectArtifact( metaGroupId, metaArtifactId, "1.0" );
- metadata = new ArtifactRepositoryMetadata( artifact );
- }
- }
- else
- {
- String groupDir = "";
- int ctr = 0;
- for ( it = pathParts.iterator(); it.hasNext(); )
- {
- String path = (String) it.next();
- if ( ctr == 0 )
- {
- groupDir = path;
- }
- else
- {
- groupDir = path + "." + groupDir;
- }
- ctr++;
- }
-
- // groupMetadata
- if ( metaGroupId != null && metaGroupId.equals( groupDir ) )
- {
- metadata = new GroupRepositoryMetadata( metaGroupId );
- }
- }
-
- return metadata;
- }
-
- public void setLastCheckedTime( ArtifactRepository repository, String operation, Date date )
- throws IOException
- {
- // see notes in resetLastCheckedTime
-
- File file = new File( repository.getBasedir(), "maven-metadata.xml" );
-
- Xpp3Dom dom = readDom( file );
-
- String dateString = new SimpleDateFormat( DATE_FMT, Locale.US ).format( date );
-
- setEntry( getLastMetadataDiscoveryDom( dom ), operation, dateString );
-
- saveDom( file, dom );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-import java.io.IOException;
-import java.util.Date;
-import java.util.Iterator;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public interface Discoverer
-{
- /**
- * Get the list of paths kicked out during the discovery process.
- *
- * @return the paths as Strings.
- */
- Iterator getKickedOutPathsIterator();
-
- /**
- * Get the list of paths excluded during the discovery process.
- *
- * @return the paths as Strings.
- */
- Iterator getExcludedPathsIterator();
-
- /**
- * Reset the time in the repository that indicates the last time a check was performed.
- *
- * @param repository the location of the repository
- * @param operation the operation to record the timestamp for
- * @throws java.io.IOException if there is a non-recoverable problem reading or writing the metadata
- */
- void resetLastCheckedTime( ArtifactRepository repository, String operation )
- throws IOException;
-
- /**
- * Set the time in the repository that indicates the last time a check was performed.
- *
- * @param repository the location of the repository
- * @param operation the operation to record the timestamp for
- * @param date the date to set the last check to
- * @throws java.io.IOException if there is a non-recoverable problem reading or writing the metadata
- */
- void setLastCheckedTime( ArtifactRepository repository, String operation, Date date )
- throws IOException;
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class DiscovererException
- extends Exception
-{
- public DiscovererException( String message )
- {
- super( message );
- }
-
- public DiscovererException( String message, Throwable cause )
- {
- super( message, cause );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class DiscovererPath
-{
- /**
- * The path discovered.
- */
- private final String path;
-
- /**
- * A comment about why the path is being processed.
- */
- private final String comment;
-
- public DiscovererPath( String path, String comment )
- {
- this.path = path;
- this.comment = comment;
- }
-
- public String getPath()
- {
- return path;
- }
-
- public String getComment()
- {
- return comment;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.StringTokenizer;
-
-/**
- * Artifact discoverer for the legacy repository layout (Maven 1.x).
- * Method used to build an artifact object using a relative path from a repository base directory. An artifactId
- * having the words "DEV", "PRE", "RC", "ALPHA", "BETA", "DEBUG", "UNOFFICIAL", "CURRENT", "LATEST", "FCS",
- * "RELEASE", "NIGHTLY", "SNAPSHOT" and "TEST" (not case-sensitive) will most likely make this method fail as
- * they are reserved for version usage.
- *
- * @author John Casey
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.repository.discovery.ArtifactDiscoverer" role-hint="legacy"
- */
-public class LegacyArtifactDiscoverer
- extends AbstractArtifactDiscoverer
-{
- /**
- * @see org.apache.maven.repository.discovery.ArtifactDiscoverer#buildArtifact(String)
- */
- public Artifact buildArtifact( String path )
- throws DiscovererException
- {
- StringTokenizer tokens = new StringTokenizer( path, "/\\" );
-
- Artifact result;
-
- int numberOfTokens = tokens.countTokens();
-
- if ( numberOfTokens == 3 )
- {
- String groupId = tokens.nextToken();
-
- String type = tokens.nextToken();
-
- if ( type.endsWith( "s" ) )
- {
- type = type.substring( 0, type.length() - 1 );
-
- // contains artifactId, version, classifier, and extension.
- String avceGlob = tokens.nextToken();
-
- //noinspection CollectionDeclaredAsConcreteClass
- LinkedList avceTokenList = new LinkedList();
-
- StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
- while ( avceTokenizer.hasMoreTokens() )
- {
- avceTokenList.addLast( avceTokenizer.nextToken() );
- }
-
- String lastAvceToken = (String) avceTokenList.removeLast();
-
- // TODO: share with other discoverer, use artifact handlers instead
- if ( lastAvceToken.endsWith( ".tar.gz" ) )
- {
- type = "distribution-tgz";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else if ( lastAvceToken.endsWith( "sources.jar" ) )
- {
- type = "java-source";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".jar".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else if ( lastAvceToken.endsWith( ".zip" ) )
- {
- type = "distribution-zip";
-
- lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else
- {
- int extPos = lastAvceToken.lastIndexOf( '.' );
-
- if ( extPos > 0 )
- {
- String ext = lastAvceToken.substring( extPos + 1 );
- if ( type.equals( ext ) )
- {
- lastAvceToken = lastAvceToken.substring( 0, extPos );
-
- avceTokenList.addLast( lastAvceToken );
- }
- else
- {
- throw new DiscovererException( "Path type does not match the extension" );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename does not have an extension" );
- }
- }
-
- // let's discover the version, and whatever's leftover will be either
- // a classifier, or part of the artifactId, depending on position.
- // Since version is at the end, we have to move in from the back.
- Collections.reverse( avceTokenList );
-
- // TODO: this is obscene - surely a better way?
- String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|" +
- "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|" +
- "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|" +
- "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|" +
- "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|" +
- "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|" +
- "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])|" + "[Ff][Ii][Nn][Aa][Ll]|" + "([AaBb][_.0-9]*)";
-
- StringBuffer classifierBuffer = new StringBuffer();
- StringBuffer versionBuffer = new StringBuffer();
-
- boolean firstVersionTokenEncountered = false;
- boolean firstToken = true;
-
- int tokensIterated = 0;
- for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
- {
- String token = (String) it.next();
-
- boolean tokenIsVersionPart = token.matches( validVersionParts );
-
- StringBuffer bufferToUpdate;
-
- // NOTE: logic in code is reversed, since we're peeling off the back
- // Any token after the last versionPart will be in the classifier.
- // Any token UP TO first non-versionPart is part of the version.
- if ( !tokenIsVersionPart )
- {
- if ( firstVersionTokenEncountered )
- {
- //noinspection BreakStatement
- break;
- }
- else
- {
- bufferToUpdate = classifierBuffer;
- }
- }
- else
- {
- firstVersionTokenEncountered = true;
-
- bufferToUpdate = versionBuffer;
- }
-
- if ( firstToken )
- {
- firstToken = false;
- }
- else
- {
- bufferToUpdate.insert( 0, '-' );
- }
-
- bufferToUpdate.insert( 0, token );
-
- tokensIterated++;
- }
-
- // Now, restore the proper ordering so we can build the artifactId.
- Collections.reverse( avceTokenList );
-
- // if we didn't find a version, then punt. Use the last token
- // as the version, and set the classifier empty.
- if ( versionBuffer.length() < 1 )
- {
- if ( avceTokenList.size() > 1 )
- {
- int lastIdx = avceTokenList.size() - 1;
-
- versionBuffer.append( avceTokenList.get( lastIdx ) );
- avceTokenList.remove( lastIdx );
- }
-
- classifierBuffer.setLength( 0 );
- }
- else
- {
- // if everything is kosher, then pop off all the classifier and
- // version tokens, leaving the naked artifact id in the list.
- avceTokenList =
- new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - tokensIterated ) );
- }
-
- StringBuffer artifactIdBuffer = new StringBuffer();
-
- firstToken = true;
- for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
- {
- String token = (String) it.next();
-
- if ( firstToken )
- {
- firstToken = false;
- }
- else
- {
- artifactIdBuffer.append( '-' );
- }
-
- artifactIdBuffer.append( token );
- }
-
- String artifactId = artifactIdBuffer.toString();
-
- if ( artifactId.length() > 0 )
- {
- int lastVersionCharIdx = versionBuffer.length() - 1;
- if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
- {
- versionBuffer.setLength( lastVersionCharIdx );
- }
-
- String version = versionBuffer.toString();
-
- if ( version.length() > 0 )
- {
- if ( classifierBuffer.length() > 0 )
- {
- result = artifactFactory.createArtifactWithClassifier( groupId, artifactId, version,
- type,
- classifierBuffer.toString() );
- }
- else
- {
- result = artifactFactory.createArtifact( groupId, artifactId, version,
- Artifact.SCOPE_RUNTIME, type );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename version is empty" );
- }
- }
- else
- {
- throw new DiscovererException( "Path filename artifactId is empty" );
- }
- }
- else
- {
- throw new DiscovererException( "Path artifact type does not corresspond to an artifact type" );
- }
- }
- else
- {
- throw new DiscovererException( "Path does not match a legacy repository path for an artifact" );
- }
-
- return result;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-import java.util.List;
-
-/**
- * Interface for discovering metadata files.
- */
-public interface MetadataDiscoverer
- extends Discoverer
-{
- String ROLE = MetadataDiscoverer.class.getName();
-
- /**
- * Search for metadata files in the repository.
- *
- * @param repository The repository.
- * @param operation the operation being performed (used for timestamp comparison)
- * @param blacklistedPatterns Patterns that are to be excluded from the discovery process.
- * @return the list of artifacts found
- */
- List discoverMetadata( ArtifactRepository repository, String operation, String blacklistedPatterns )
- throws DiscovererException;
-}
There are two plexus components available:
- * {{{../apidocs/org/apache/maven/repository/discovery/ArtifactDiscoverer.html} ArtifactDiscoverer}}
+ * {{{../apidocs/org/apache/maven/archiva/discoverer/ArtifactDiscoverer.html} ArtifactDiscoverer}}
- * {{{../apidocs/org/apache/maven/repository/discovery/MetadataDiscoverer.html} MetadataDiscoverer}}
+ * {{{../apidocs/org/apache/maven/archiva/discoverer/MetadataDiscoverer.html} MetadataDiscoverer}}
Each of these components currently have an implementation for the both <<<legacy>>> and <<<default>>> repository
layouts.
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * @author Edwin Punzalan
+ */
+public abstract class AbstractArtifactDiscovererTest
+ extends PlexusTestCase
+{
+ protected ArtifactDiscoverer discoverer;
+
+ private ArtifactFactory factory;
+
+ protected ArtifactRepository repository;
+
+ protected static final String TEST_OPERATION = "test";
+
+ protected abstract String getLayout();
+
+ protected abstract File getRepositoryFile();
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, getLayout() );
+
+ factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ repository = getRepository();
+
+ removeTimestampMetadata();
+ }
+
+ protected ArtifactRepository getRepository()
+ throws Exception
+ {
+ File basedir = getRepositoryFile();
+
+ ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout =
+ (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, getLayout() );
+
+ return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
+ }
+
+ protected Artifact createArtifact( String groupId, String artifactId, String version )
+ {
+ Artifact artifact = factory.createArtifact( groupId, artifactId, version, null, "jar" );
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+
+ protected Artifact createArtifact( String groupId, String artifactId, String version, String type )
+ {
+ return factory.createArtifact( groupId, artifactId, version, null, type );
+ }
+
+ protected Artifact createArtifact( String groupId, String artifactId, String version, String type,
+ String classifier )
+ {
+ return factory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
+ }
+
+ public void testUpdatedInRepository()
+ throws ComponentLookupException, DiscovererException, ParseException, IOException
+ {
+ // Set repository time to 1-1-2000, a time in the distant past so definitely updated
+ discoverer.setLastCheckedTime( repository, "update",
+ new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" ) );
+
+ List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-updated", "1.0" ) ) );
+
+ // try again with the updated timestamp
+ artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertFalse( "Check not included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-updated", "1.0" ) ) );
+ }
+
+ public void testNotUpdatedInRepository()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ // Set repository time to now, which is after any artifacts, so definitely not updated
+ discoverer.setLastCheckedTime( repository, "update", new Date() );
+
+ List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertFalse( "Check not included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
+ }
+
+ public void testNotUpdatedInRepositoryForcedDiscovery()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ discoverer.resetLastCheckedTime( repository, "update" );
+
+ List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
+
+ // try again with the updated timestamp
+ artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertFalse( "Check not included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
+ }
+
+ public void testUpdatedInRepositoryBlackout()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ discoverer.resetLastCheckedTime( repository, "update" );
+
+ Artifact artifact = createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" );
+ artifact.getFile().setLastModified( System.currentTimeMillis() );
+
+ List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertFalse( "Check not included", artifacts.contains( artifact ) );
+
+ // try again with the updated timestamp
+ artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertFalse( "Check not included", artifacts.contains( artifact ) );
+ }
+
+ public void testUpdatedInRepositoryNotBlackout()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ discoverer.resetLastCheckedTime( repository, "update" );
+
+ Artifact artifact = createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" );
+ artifact.getFile().setLastModified( System.currentTimeMillis() - 61000 );
+
+ List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check included", artifacts.contains( artifact ) );
+
+ // try again with the updated timestamp
+ artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertFalse( "Check not included", artifacts.contains( artifact ) );
+ }
+
+ public void testNotUpdatedInRepositoryForcedDiscoveryMetadataAlreadyExists()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ discoverer.setLastCheckedTime( repository, "update", new Date() );
+
+ discoverer.resetLastCheckedTime( repository, "update" );
+
+ List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
+
+ // try again with the updated timestamp
+ artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertFalse( "Check not included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
+ }
+
+ public void testNotUpdatedInRepositoryForcedDiscoveryOtherMetadataAlreadyExists()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ discoverer.setLastCheckedTime( repository, "test", new Date() );
+
+ discoverer.resetLastCheckedTime( repository, "update" );
+
+ List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
+
+ // try again with the updated timestamp
+ artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertFalse( "Check not included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
+ }
+
+ public void testNoRepositoryMetadata()
+ throws ComponentLookupException, DiscovererException, ParseException, IOException
+ {
+ removeTimestampMetadata();
+
+ // should find all
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check included",
+ artifacts.contains( createArtifact( "org.apache.maven.update", "test-updated", "1.0" ) ) );
+ }
+
+ private void removeTimestampMetadata()
+ {
+ // remove the metadata that tracks time
+ File file = new File( repository.getBasedir(), "maven-metadata.xml" );
+ file.delete();
+ assertFalse( file.exists() );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Test the default artifact discoverer.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @version $Id$
+ * @todo test location of poms, checksums
+ */
+public class DefaultArtifactDiscovererTest
+ extends AbstractArtifactDiscovererTest
+{
+
+ protected String getLayout()
+ {
+ return "default";
+ }
+
+ protected File getRepositoryFile()
+ {
+ return getTestFile( "src/test/repository" );
+ }
+
+ public void testDefaultExcludes()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ boolean b = path.indexOf( ".svn" ) >= 0;
+ if ( b )
+ {
+ found = true;
+ assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
+ }
+ }
+ assertTrue( "Check exclusion was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
+ }
+ }
+
+ public void testStandardExcludes()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "KEYS".equals( path ) )
+ {
+ found = true;
+ assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
+ }
+ }
+ assertTrue( "Check exclusion was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testBlacklistedExclude()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, "javax/**", false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "javax/sql/jdbc/2.0/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check exclusion was found", found );
+
+ assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
+ }
+
+ public void testKickoutWithShortPath()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Path is too short to build an artifact from",
+ dPath.getComment() );
+
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testKickoutWithWrongArtifactId()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar".equals(
+ path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Path filename does not correspond to an artifact",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not wrong jar",
+ "wrong-artifactId-1.0-20050611.112233-1.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testKickoutWithNoType()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/invalid/1/invalid-1".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Path filename does not have an extension",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not 'invalid-1'", "invalid-1".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testKickoutWithWrongVersion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/invalid/1.0/invalid-2.0.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Built artifact version does not match path version",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not 'invalid-2.0.jar'", "invalid-2.0.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testKickoutWithLongerVersion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/invalid/1.0/invalid-1.0b.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Path version does not corresspond to an artifact version",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not 'invalid-1.0b.jar'", "invalid-1.0b.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testKickoutWithWrongSnapshotVersion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout",
+ "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not 'invalid-1.0.jar'", "invalid-1.0.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testKickoutWithSnapshotBaseVersion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar".equals(
+ path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout",
+ "Built snapshot artifact base version does not match path version: invalid:invalid:jar:1.0-SNAPSHOT:runtime; should have been version: 1.0-20050611.123456-1",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not 'invalid-1.0-20050611-123456-1.jar'",
+ "invalid-1.0-20050611.123456-1.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testInclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
+ }
+
+ public void testArtifactWithClassifier()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included",
+ artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
+ }
+
+ public void testJavaSourcesInclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included", artifacts.contains(
+ createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
+ }
+
+ public void testDistributionInclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check zip included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
+
+ assertTrue( "Check tar.gz included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
+ }
+
+ public void testSnapshotInclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
+ assertTrue( "Check snapshot included",
+ artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ) ) );
+ }
+
+ public void testSnapshotInclusionWithClassifier()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check snapshot included", artifacts.contains(
+ createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ) ) );
+ }
+
+ public void testSnapshotExclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
+ assertFalse( "Check snapshot included",
+ artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) );
+ }
+
+ public void testFileSet()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact artifact = (Artifact) i.next();
+ assertNotNull( "Check file is set", artifact.getFile() );
+ }
+ }
+
+ public void testRepositorySet()
+ throws MalformedURLException, DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ String url = repository.getUrl();
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact artifact = (Artifact) i.next();
+ assertNotNull( "Check repository set", artifact.getRepository() );
+ assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
+ }
+ }
+
    /**
     * Verifies that exactly four standalone POMs (poms with no accompanying main
     * artifact) are discovered, and checks their identity in sorted order.
     */
    public void testStandalonePoms()
        throws DiscovererException
    {
        List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );

        // cull down to actual artifacts (only standalone poms will have type = pom)
        // A non-pom artifact is always stored (overwriting any pom under the same key),
        // while a pom is only stored when the key is not yet taken — so a pom survives
        // only when no main artifact shares its groupId:artifactId:version.
        Map keyedArtifacts = new HashMap();
        for ( Iterator i = artifacts.iterator(); i.hasNext(); )
        {
            Artifact a = (Artifact) i.next();
            String key = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
            if ( !"pom".equals( a.getType() ) || !keyedArtifacts.containsKey( key ) )
            {
                keyedArtifacts.put( key, a );
            }
        }

        // Collect the surviving poms — these are the standalone ones.
        List models = new ArrayList();

        for ( Iterator i = keyedArtifacts.values().iterator(); i.hasNext(); )
        {
            Artifact a = (Artifact) i.next();

            if ( "pom".equals( a.getType() ) )
            {
                models.add( a );
            }
        }

        assertEquals( 4, models.size() );

        // Define order we expect
        Collections.sort( models );

        Iterator itr = models.iterator();
        Artifact model = (Artifact) itr.next();
        assertEquals( "org.apache.maven", model.getGroupId() );
        assertEquals( "B", model.getArtifactId() );
        assertEquals( "1.0", model.getVersion() );
        model = (Artifact) itr.next();
        assertEquals( "org.apache.maven", model.getGroupId() );
        assertEquals( "B", model.getArtifactId() );
        assertEquals( "2.0", model.getVersion() );
        model = (Artifact) itr.next();
        assertEquals( "org.apache.maven", model.getGroupId() );
        assertEquals( "discovery", model.getArtifactId() );
        assertEquals( "1.0", model.getVersion() );
        model = (Artifact) itr.next();
        assertEquals( "org.apache.testgroup", model.getGroupId() );
        assertEquals( "discovery", model.getArtifactId() );
        assertEquals( "1.0", model.getVersion() );
    }
+
+ public void testShortPath()
+ throws ComponentLookupException
+ {
+ try
+ {
+ discoverer.buildArtifact( "invalid/invalid-1.0.jar" );
+
+ fail( "Artifact should be null for short paths" );
+ }
+ catch ( DiscovererException e )
+ {
+ // excellent
+ }
+ }
+
+ public void testWrongArtifactId()
+ throws ComponentLookupException
+ {
+
+ try
+ {
+ discoverer.buildArtifact( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
+
+ fail( "Artifact should be null for wrong ArtifactId" );
+ }
+ catch ( DiscovererException e )
+ {
+ // excellent
+ }
+ }
+
+ public void testNoType()
+ throws ComponentLookupException
+ {
+ try
+ {
+ discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
+
+ fail( "Artifact should be null for no type" );
+ }
+ catch ( DiscovererException e )
+ {
+ // excellent
+ }
+ }
+
+ public void testWrongVersion()
+ throws ComponentLookupException
+ {
+ try
+ {
+ discoverer.buildArtifact( "invalid/invalid/1.0/invalid-2.0.jar" );
+
+ fail( "Artifact should be null for wrong version" );
+ }
+ catch ( DiscovererException e )
+ {
+ // excellent
+ }
+ }
+
+ public void testLongVersion()
+ throws ComponentLookupException
+ {
+ try
+ {
+ discoverer.buildArtifact( "invalid/invalid/1.0/invalid-1.0b.jar" );
+
+ fail( "Artifact should be null for long version" );
+ }
+ catch ( DiscovererException e )
+ {
+ // excellent
+ }
+ }
+
+ public void testWrongSnapshotVersion()
+ throws ComponentLookupException
+ {
+ try
+ {
+ discoverer.buildArtifact( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
+
+ fail( "Artifact should be null for wrong snapshot version" );
+ }
+ catch ( DiscovererException e )
+ {
+ // excellent
+ }
+ }
+
+ public void testSnapshotBaseVersion()
+ throws ComponentLookupException
+ {
+ try
+ {
+ discoverer.buildArtifact( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
+
+ fail( "Artifact should be null for snapshot base version" );
+ }
+ catch ( DiscovererException e )
+ {
+ // excellent
+ }
+ }
+
+ public void testPathWithClassifier()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ), artifact );
+ }
+
+ public void testWithJavaSourceInclusion()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "org/apache/maven/testing/1.0/testing-1.0-sources.jar";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ), artifact );
+ }
+
+ public void testDistributionArtifacts()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "org/apache/maven/testing/1.0/testing-1.0.tar.gz";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ), artifact );
+
+ testPath = "org/apache/maven/testing/1.0/testing-1.0.zip";
+
+ artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ), artifact );
+ }
+
+ public void testSnapshot()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ), artifact );
+
+ testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar";
+
+ artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ), artifact );
+ }
+
+ public void testNormal()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "javax/sql/jdbc/2.0/jdbc-2.0.jar";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
+ }
+
+ public void testSnapshotWithClassifier()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ),
+ artifact );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * This class tests the DefaultMetadataDiscoverer class.
+ */
+public class DefaultMetadataDiscovererTest
+ extends PlexusTestCase
+{
+ private MetadataDiscoverer discoverer;
+
+ private static final String TEST_OPERATION = "test";
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory factory;
+
+ /**
+ *
+ */
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ discoverer = (MetadataDiscoverer) lookup( MetadataDiscoverer.ROLE, "default" );
+
+ factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ repository = getRepository();
+
+ removeTimestampMetadata();
+ }
+
+ protected ArtifactRepository getRepository()
+ throws Exception
+ {
+ File basedir = getTestFile( "src/test/repository" );
+
+ ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
+ }
+
+ /**
+ *
+ */
+ public void tearDown()
+ throws Exception
+ {
+ super.tearDown();
+ discoverer = null;
+ }
+
+ /**
+ * Test if metadata file in wrong directory was added to the kickedOutPaths.
+ */
+ public void testKickoutWrongDirectory()
+ throws DiscovererException
+ {
+ discoverer.discoverMetadata( repository, TEST_OPERATION, null );
+ Iterator iter = discoverer.getKickedOutPathsIterator();
+ boolean found = false;
+ while ( iter.hasNext() && !found )
+ {
+ DiscovererPath dPath = (DiscovererPath) iter.next();
+ String dir = dPath.getPath();
+
+ String normalizedDir = dir.replace( '\\', '/' );
+ if ( "javax/maven-metadata.xml".equals( normalizedDir ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Unable to build a repository metadata from path",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( found );
+ }
+
+ /**
+ * Test if blank metadata file was added to the kickedOutPaths.
+ */
+ public void testKickoutBlankMetadata()
+ throws DiscovererException
+ {
+ discoverer.discoverMetadata( repository, TEST_OPERATION, null );
+ Iterator iter = discoverer.getKickedOutPathsIterator();
+ boolean found = false;
+ while ( iter.hasNext() && !found )
+ {
+ DiscovererPath dPath = (DiscovererPath) iter.next();
+ String dir = dPath.getPath();
+
+ String normalizedDir = dir.replace( '\\', '/' );
+ if ( "org/apache/maven/some-ejb/1.0/maven-metadata.xml".equals( normalizedDir ) )
+ {
+ found = true;
+ assertTrue( "Check reason for kickout", dPath.getComment().matches(
+ "Error reading metadata file '(.*)': input contained no data" ) );
+ }
+ }
+ assertTrue( found );
+ }
+
+ private void removeTimestampMetadata()
+ throws IOException
+ {
+ // remove the metadata that tracks time
+ File file = new File( repository.getBasedir(), "maven-metadata.xml" );
+ System.gc(); // for Windows
+ file.delete();
+ assertFalse( file.exists() );
+ }
+
+ public void testDiscoverMetadata()
+ throws DiscovererException
+ {
+ List metadataPaths = discoverer.discoverMetadata( repository, TEST_OPERATION, null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ RepositoryMetadata metadata =
+ new ArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery" ) );
+ assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
+
+ metadata =
+ new SnapshotArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery", "1.0" ) );
+ assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
+
+ metadata = new GroupRepositoryMetadata( "org.apache.maven" );
+ assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
+ }
+
+ public void testUpdatedInRepository()
+ throws ComponentLookupException, DiscovererException, ParseException, IOException
+ {
+ // Set repository time to 1-1-2000, a time in the distant past so definitely updated
+ discoverer.setLastCheckedTime( repository, "update",
+ new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" ) );
+
+ List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ RepositoryMetadata metadata =
+ new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-updated" ) );
+ assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
+
+ // try again with the updated timestamp
+ metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
+ }
+
+ private boolean containsMetadata( List metadataPaths, RepositoryMetadata metadata )
+ {
+ for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
+ {
+ RepositoryMetadata m = (RepositoryMetadata) i.next();
+
+ if ( m.getGroupId().equals( metadata.getGroupId() ) )
+ {
+ if ( m.getArtifactId() == null && metadata.getArtifactId() == null )
+ {
+ return true;
+ }
+ else if ( m.getArtifactId() != null && m.getArtifactId().equals( metadata.getArtifactId() ) )
+ {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ public void testNotUpdatedInRepository()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ // Set repository time to now, which is after any artifacts, so definitely not updated
+ discoverer.setLastCheckedTime( repository, "update", new Date() );
+
+ List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ RepositoryMetadata metadata =
+ new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-not-updated" ) );
+ assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
+ }
+
+ public void testNotUpdatedInRepositoryForcedDiscovery()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ discoverer.resetLastCheckedTime( repository, "update" );
+
+ List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ RepositoryMetadata metadata =
+ new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-not-updated" ) );
+ assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
+
+ // try again with the updated timestamp
+ metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
+ }
+
+ public void testNotUpdatedInRepositoryForcedDiscoveryMetadataAlreadyExists()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ discoverer.setLastCheckedTime( repository, "update", new Date() );
+
+ discoverer.resetLastCheckedTime( repository, "update" );
+
+ List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ RepositoryMetadata metadata =
+ new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-not-updated" ) );
+ assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
+
+ // try again with the updated timestamp
+ metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
+ }
+
+ public void testNotUpdatedInRepositoryForcedDiscoveryOtherMetadataAlreadyExists()
+ throws ComponentLookupException, DiscovererException, IOException
+ {
+ discoverer.setLastCheckedTime( repository, "test", new Date() );
+
+ discoverer.resetLastCheckedTime( repository, "update" );
+
+ List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ RepositoryMetadata metadata =
+ new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-not-updated" ) );
+ assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
+
+ // try again with the updated timestamp
+ metadataPaths = discoverer.discoverMetadata( repository, "update", null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
+ }
+
+ public void testNoRepositoryMetadata()
+ throws ComponentLookupException, DiscovererException, ParseException, IOException
+ {
+ removeTimestampMetadata();
+
+ // should find all
+ List metadataPaths = discoverer.discoverMetadata( repository, TEST_OPERATION, null );
+ assertNotNull( "Check metadata not null", metadataPaths );
+
+ RepositoryMetadata metadata =
+ new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-updated" ) );
+ assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
+ }
+
+ protected Artifact createArtifact( String groupId, String artifactId )
+ {
+ return createArtifact( groupId, artifactId, "1.0" );
+ }
+
+ private Artifact createArtifact( String groupId, String artifactId, String version )
+ {
+ return factory.createArtifact( groupId, artifactId, version, null, "jar" );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.discovery;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Test the legacy artifact discoverer.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @version $Id$
+ */
+public class LegacyArtifactDiscovererTest
+ extends AbstractArtifactDiscovererTest
+{
    // Selects the legacy repository layout for the abstract base class's lookups.
    protected String getLayout()
    {
        return "legacy";
    }
+
    // Points the abstract base class at the legacy-layout test repository fixture.
    protected File getRepositoryFile()
    {
        return getTestFile( "src/test/legacy-repository" );
    }
+
+ public void testDefaultExcludes()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( path.indexOf( ".svn" ) >= 0 )
+ {
+ found = true;
+ assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
+ }
+ }
+ assertTrue( "Check exclusion was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
+ }
+ }
+
+ public void testStandardExcludes()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "KEYS".equals( path ) )
+ {
+ found = true;
+ assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
+ }
+ }
+ assertTrue( "Check exclusion was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testBlacklistedExclude()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, "javax.sql/**", false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "javax.sql/jars/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check exclusion was found", found );
+
+ assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
+ }
+
+ public void testKickoutWithShortPath()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout",
+ "Path does not match a legacy repository path for an artifact", dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ public void testKickoutWithLongPath()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/jars/1.0/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout",
+ "Path does not match a legacy repository path for an artifact", dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ // Verifies that a path whose type directory ("foo") is not a known artifact
+ // type is kicked out, and the corresponding file is not discovered.
+ public void testKickoutWithInvalidType()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/foo/invalid-1.0.foo".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ // NOTE(review): "corresspond" spelling presumably mirrors the exact message
+ // emitted by the discoverer — do not "fix" it here without changing the source.
+ assertEquals( "Check reason for kickout", "Path artifact type does not corresspond to an artifact type",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not invalid-1.0.foo", "invalid-1.0.foo".equals( a.getFile().getName() ) );
+ }
+ }
+
+ // Verifies that a filename without any extension is kicked out with the
+ // "Path filename does not have an extension" reason and never discovered.
+ public void testKickoutWithNoExtension()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/jars/no-extension".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Path filename does not have an extension",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not 'no-extension'", "no-extension".equals( a.getFile().getName() ) );
+ }
+ }
+
+ // Verifies that a file whose extension (.rar) disagrees with its type
+ // directory (jars) is kicked out and not discovered.
+ public void testKickoutWithWrongExtension()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/jars/invalid-1.0.rar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Path type does not match the extension",
+ dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not 'invalid-1.0.rar'", "invalid-1.0.rar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ // Verifies that a filename carrying no version ("invalid.jar") is kicked out
+ // with the "Path filename version is empty" reason and not discovered.
+ public void testKickoutWithNoVersion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+ boolean found = false;
+ for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
+ {
+ DiscovererPath dPath = (DiscovererPath) i.next();
+
+ String path = dPath.getPath();
+
+ if ( "invalid/jars/invalid.jar".equals( path.replace( '\\', '/' ) ) )
+ {
+ found = true;
+ assertEquals( "Check reason for kickout", "Path filename version is empty", dPath.getComment() );
+ }
+ }
+ assertTrue( "Check kickout was found", found );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact a = (Artifact) i.next();
+ assertFalse( "Check not 'invalid.jar'", "invalid.jar".equals( a.getFile().getName() ) );
+ }
+ }
+
+ // A well-formed artifact is discovered (snapshots included — last arg true).
+ public void testInclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
+ }
+
+ // A purely textual version string ("UNKNOWN") is still accepted as a version.
+ public void testTextualVersion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "UNKNOWN" ) ) );
+ }
+
+ // An artifact with a classifier ("client" jar) is discovered.
+ public void testArtifactWithClassifier()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included",
+ artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
+ }
+
+ // A java-source artifact with the "sources" classifier is discovered.
+ public void testJavaSourcesInclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included", artifacts.contains(
+ createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
+ }
+
+ // Distribution artifacts (zip and tar.gz) are discovered with their
+ // distribution-* types.
+ public void testDistributionInclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check zip included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
+
+ assertTrue( "Check tar.gz included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
+ }
+
+ // With snapshot discovery enabled (last arg true) both a release and a
+ // timestamped snapshot are returned.
+ public void testSnapshotInclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
+ assertTrue( "Check snapshot included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
+ }
+
+ // With snapshot discovery disabled (last arg false) the release artifact is
+ // still found but the timestamped snapshot is filtered out.
+ public void testSnapshotExclusion()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
+ // Failure message previously read "Check snapshot included", the opposite of
+ // what this assertFalse verifies.
+ assertFalse( "Check snapshot not included",
+ artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
+ }
+
+ // Every discovered artifact has its File populated.
+ public void testFileSet()
+ throws DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact artifact = (Artifact) i.next();
+ assertNotNull( "Check file is set", artifact.getFile() );
+ }
+ }
+
+ // Every discovered artifact references the repository it was found in,
+ // with a matching repository URL.
+ public void testRepositorySet()
+ throws MalformedURLException, DiscovererException
+ {
+ List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
+ assertNotNull( "Check artifacts not null", artifacts );
+
+ String url = repository.getUrl();
+ for ( Iterator i = artifacts.iterator(); i.hasNext(); )
+ {
+ Artifact artifact = (Artifact) i.next();
+ assertNotNull( "Check repository set", artifact.getRepository() );
+ assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
+ }
+ }
+
+ // Building an artifact from a checksum path (.jar.md5) must fail with a
+ // DiscovererException.
+ public void testWrongArtifactPackaging()
+ throws ComponentLookupException, DiscovererException
+ {
+ try
+ {
+ discoverer.buildArtifact( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
+
+ // Message previously claimed the artifact "should be null"; the contract
+ // under test is that buildArtifact throws, not that it returns null.
+ fail( "Expected a DiscovererException for wrong package extension" );
+ }
+ catch ( DiscovererException e )
+ {
+ // expected
+ }
+ }
+
+ // Paths whose filename yields no artifactId ("-1.0.jar", "1.0.jar") must
+ // cause buildArtifact to throw a DiscovererException.
+ public void testNoArtifactId()
+ throws DiscovererException
+ {
+ try
+ {
+ discoverer.buildArtifact( "groupId/jars/-1.0.jar" );
+
+ // Message previously claimed the artifact "should be null"; the contract
+ // under test is that buildArtifact throws, not that it returns null.
+ fail( "Expected a DiscovererException when the artifactId is missing" );
+ }
+ catch ( DiscovererException e )
+ {
+ // expected
+ }
+
+ try
+ {
+ discoverer.buildArtifact( "groupId/jars/1.0.jar" );
+
+ fail( "Expected a DiscovererException when the artifactId is missing" );
+ }
+ catch ( DiscovererException e )
+ {
+ // expected
+ }
+ }
+
+ // A path with no type/extension on the filename must cause buildArtifact to
+ // throw a DiscovererException.
+ public void testNoType()
+ throws ComponentLookupException, DiscovererException
+ {
+ try
+ {
+ discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
+
+ // Message previously claimed the artifact "should be null"; the contract
+ // under test is that buildArtifact throws, not that it returns null.
+ fail( "Expected a DiscovererException when the path has no type" );
+ }
+ catch ( DiscovererException e )
+ {
+ // expected
+ }
+ }
+
+ // buildArtifact parses a -SNAPSHOT path into the expected artifact.
+ public void testSnapshot()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT" ), artifact );
+ }
+
+ // A multi-part qualifier version ("1.0-final-20060606") is parsed intact.
+ public void testFinal()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606" ), artifact );
+ }
+
+ // buildArtifact parses a plain release path into the expected artifact.
+ public void testNormal()
+ throws ComponentLookupException, DiscovererException
+ {
+ String testPath = "javax.sql/jars/jdbc-2.0.jar";
+
+ Artifact artifact = discoverer.buildArtifact( testPath );
+
+ assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * @author Edwin Punzalan
- */
-public abstract class AbstractArtifactDiscovererTest
- extends PlexusTestCase
-{
- protected ArtifactDiscoverer discoverer;
-
- private ArtifactFactory factory;
-
- protected ArtifactRepository repository;
-
- protected static final String TEST_OPERATION = "test";
-
- protected abstract String getLayout();
-
- protected abstract File getRepositoryFile();
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- discoverer = (ArtifactDiscoverer) lookup( ArtifactDiscoverer.ROLE, getLayout() );
-
- factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- repository = getRepository();
-
- removeTimestampMetadata();
- }
-
- protected ArtifactRepository getRepository()
- throws Exception
- {
- File basedir = getRepositoryFile();
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout =
- (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, getLayout() );
-
- return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version )
- {
- Artifact artifact = factory.createArtifact( groupId, artifactId, version, null, "jar" );
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version, String type )
- {
- return factory.createArtifact( groupId, artifactId, version, null, type );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId, String version, String type,
- String classifier )
- {
- return factory.createArtifactWithClassifier( groupId, artifactId, version, type, classifier );
- }
-
- public void testUpdatedInRepository()
- throws ComponentLookupException, DiscovererException, ParseException, IOException
- {
- // Set repository time to 1-1-2000, a time in the distant past so definitely updated
- discoverer.setLastCheckedTime( repository, "update",
- new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" ) );
-
- List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-updated", "1.0" ) ) );
-
- // try again with the updated timestamp
- artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertFalse( "Check not included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-updated", "1.0" ) ) );
- }
-
- public void testNotUpdatedInRepository()
- throws ComponentLookupException, DiscovererException, IOException
- {
- // Set repository time to now, which is after any artifacts, so definitely not updated
- discoverer.setLastCheckedTime( repository, "update", new Date() );
-
- List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertFalse( "Check not included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
- }
-
- public void testNotUpdatedInRepositoryForcedDiscovery()
- throws ComponentLookupException, DiscovererException, IOException
- {
- discoverer.resetLastCheckedTime( repository, "update" );
-
- List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
-
- // try again with the updated timestamp
- artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertFalse( "Check not included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
- }
-
- public void testUpdatedInRepositoryBlackout()
- throws ComponentLookupException, DiscovererException, IOException
- {
- discoverer.resetLastCheckedTime( repository, "update" );
-
- Artifact artifact = createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" );
- artifact.getFile().setLastModified( System.currentTimeMillis() );
-
- List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertFalse( "Check not included", artifacts.contains( artifact ) );
-
- // try again with the updated timestamp
- artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertFalse( "Check not included", artifacts.contains( artifact ) );
- }
-
- public void testUpdatedInRepositoryNotBlackout()
- throws ComponentLookupException, DiscovererException, IOException
- {
- discoverer.resetLastCheckedTime( repository, "update" );
-
- Artifact artifact = createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" );
- artifact.getFile().setLastModified( System.currentTimeMillis() - 61000 );
-
- List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check included", artifacts.contains( artifact ) );
-
- // try again with the updated timestamp
- artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertFalse( "Check not included", artifacts.contains( artifact ) );
- }
-
- public void testNotUpdatedInRepositoryForcedDiscoveryMetadataAlreadyExists()
- throws ComponentLookupException, DiscovererException, IOException
- {
- discoverer.setLastCheckedTime( repository, "update", new Date() );
-
- discoverer.resetLastCheckedTime( repository, "update" );
-
- List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
-
- // try again with the updated timestamp
- artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertFalse( "Check not included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
- }
-
- public void testNotUpdatedInRepositoryForcedDiscoveryOtherMetadataAlreadyExists()
- throws ComponentLookupException, DiscovererException, IOException
- {
- discoverer.setLastCheckedTime( repository, "test", new Date() );
-
- discoverer.resetLastCheckedTime( repository, "update" );
-
- List artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
-
- // try again with the updated timestamp
- artifacts = discoverer.discoverArtifacts( repository, "update", null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertFalse( "Check not included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-not-updated", "1.0" ) ) );
- }
-
- public void testNoRepositoryMetadata()
- throws ComponentLookupException, DiscovererException, ParseException, IOException
- {
- removeTimestampMetadata();
-
- // should find all
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check included",
- artifacts.contains( createArtifact( "org.apache.maven.update", "test-updated", "1.0" ) ) );
- }
-
- private void removeTimestampMetadata()
- {
- // remove the metadata that tracks time
- File file = new File( repository.getBasedir(), "maven-metadata.xml" );
- file.delete();
- assertFalse( file.exists() );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the default artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- * @todo test location of poms, checksums
- */
-public class DefaultArtifactDiscovererTest
- extends AbstractArtifactDiscovererTest
-{
-
- protected String getLayout()
- {
- return "default";
- }
-
- protected File getRepositoryFile()
- {
- return getTestFile( "src/test/repository" );
- }
-
- public void testDefaultExcludes()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- boolean b = path.indexOf( ".svn" ) >= 0;
- if ( b )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
- }
- }
-
- public void testStandardExcludes()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "KEYS".equals( path ) )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
- }
- }
-
- public void testBlacklistedExclude()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, "javax/**", false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "javax/sql/jdbc/2.0/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
- dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- }
-
- public void testKickoutWithShortPath()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path is too short to build an artifact from",
- dPath.getComment() );
-
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongArtifactId()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar".equals(
- path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not correspond to an artifact",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not wrong jar",
- "wrong-artifactId-1.0-20050611.112233-1.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoType()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1/invalid-1".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not have an extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1'", "invalid-1".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongVersion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0/invalid-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Built artifact version does not match path version",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-2.0.jar'", "invalid-2.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithLongerVersion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0/invalid-1.0b.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path version does not corresspond to an artifact version",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0b.jar'", "invalid-1.0b.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongSnapshotVersion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Failed to create a snapshot artifact: invalid:invalid:jar:1.0:runtime",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0.jar'", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithSnapshotBaseVersion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar".equals(
- path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Built snapshot artifact base version does not match path version: invalid:invalid:jar:1.0-SNAPSHOT:runtime; should have been version: 1.0-20050611.123456-1",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0-20050611-123456-1.jar'",
- "invalid-1.0-20050611.123456-1.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testInclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
- }
-
- public void testArtifactWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
- }
-
- public void testJavaSourcesInclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains(
- createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
- }
-
- public void testDistributionInclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check zip included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
- assertTrue( "Check tar.gz included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
- }
-
- public void testSnapshotInclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertTrue( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testSnapshotInclusionWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check snapshot included", artifacts.contains(
- createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ) ) );
- }
-
- public void testSnapshotExclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertFalse( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ) ) );
- }
-
- public void testFileSet()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check file is set", artifact.getFile() );
- }
- }
-
- public void testRepositorySet()
- throws MalformedURLException, DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- String url = repository.getUrl();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check repository set", artifact.getRepository() );
- assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
- }
- }
-
- public void testStandalonePoms()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
-
- // cull down to actual artifacts (only standalone poms will have type = pom)
- Map keyedArtifacts = new HashMap();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- String key = a.getGroupId() + ":" + a.getArtifactId() + ":" + a.getVersion();
- if ( !"pom".equals( a.getType() ) || !keyedArtifacts.containsKey( key ) )
- {
- keyedArtifacts.put( key, a );
- }
- }
-
- List models = new ArrayList();
-
- for ( Iterator i = keyedArtifacts.values().iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
-
- if ( "pom".equals( a.getType() ) )
- {
- models.add( a );
- }
- }
-
- assertEquals( 4, models.size() );
-
- // Define order we expect
- Collections.sort( models );
-
- Iterator itr = models.iterator();
- Artifact model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "B", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "B", model.getArtifactId() );
- assertEquals( "2.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.maven", model.getGroupId() );
- assertEquals( "discovery", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- model = (Artifact) itr.next();
- assertEquals( "org.apache.testgroup", model.getGroupId() );
- assertEquals( "discovery", model.getArtifactId() );
- assertEquals( "1.0", model.getVersion() );
- }
-
- public void testShortPath()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid-1.0.jar" );
-
- fail( "Artifact should be null for short paths" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongArtifactId()
- throws ComponentLookupException
- {
-
- try
- {
- discoverer.buildArtifact( "org/apache/maven/test/1.0-SNAPSHOT/wrong-artifactId-1.0-20050611.112233-1.jar" );
-
- fail( "Artifact should be null for wrong ArtifactId" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoType()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
- fail( "Artifact should be null for no type" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0/invalid-2.0.jar" );
-
- fail( "Artifact should be null for wrong version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testLongVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0/invalid-1.0b.jar" );
-
- fail( "Artifact should be null for long version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testWrongSnapshotVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0-SNAPSHOT/invalid-1.0.jar" );
-
- fail( "Artifact should be null for wrong snapshot version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testSnapshotBaseVersion()
- throws ComponentLookupException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1.0-20050611.123456-1/invalid-1.0-20050611.123456-1.jar" );
-
- fail( "Artifact should be null for snapshot base version" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testPathWithClassifier()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/some-ejb/1.0/some-ejb-1.0-client.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ), artifact );
- }
-
- public void testWithJavaSourceInclusion()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/testing/1.0/testing-1.0-sources.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ), artifact );
- }
-
- public void testDistributionArtifacts()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/testing/1.0/testing-1.0.tar.gz";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ), artifact );
-
- testPath = "org/apache/maven/testing/1.0/testing-1.0.zip";
-
- artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ), artifact );
- }
-
- public void testSnapshot()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-SNAPSHOT.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-SNAPSHOT" ), artifact );
-
- testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1.jar";
-
- artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1" ), artifact );
- }
-
- public void testNormal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax/sql/jdbc/2.0/jdbc-2.0.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
- }
-
- public void testSnapshotWithClassifier()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org/apache/maven/test/1.0-SNAPSHOT/test-1.0-20050611.112233-1-javadoc.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven", "test", "1.0-20050611.112233-1", "jar", "javadoc" ),
- artifact );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * This class tests the DefaultMetadataDiscoverer class.
- */
-public class DefaultMetadataDiscovererTest
- extends PlexusTestCase
-{
- private MetadataDiscoverer discoverer;
-
- private static final String TEST_OPERATION = "test";
-
- private ArtifactRepository repository;
-
- private ArtifactFactory factory;
-
- /**
- *
- */
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- discoverer = (MetadataDiscoverer) lookup( MetadataDiscoverer.ROLE, "default" );
-
- factory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- repository = getRepository();
-
- removeTimestampMetadata();
- }
-
- protected ArtifactRepository getRepository()
- throws Exception
- {
- File basedir = getTestFile( "src/test/repository" );
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- return factory.createArtifactRepository( "discoveryRepo", "file://" + basedir, layout, null, null );
- }
-
- /**
- *
- */
- public void tearDown()
- throws Exception
- {
- super.tearDown();
- discoverer = null;
- }
-
- /**
- * Test if metadata file in wrong directory was added to the kickedOutPaths.
- */
- public void testKickoutWrongDirectory()
- throws DiscovererException
- {
- discoverer.discoverMetadata( repository, TEST_OPERATION, null );
- Iterator iter = discoverer.getKickedOutPathsIterator();
- boolean found = false;
- while ( iter.hasNext() && !found )
- {
- DiscovererPath dPath = (DiscovererPath) iter.next();
- String dir = dPath.getPath();
-
- String normalizedDir = dir.replace( '\\', '/' );
- if ( "javax/maven-metadata.xml".equals( normalizedDir ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Unable to build a repository metadata from path",
- dPath.getComment() );
- }
- }
- assertTrue( found );
- }
-
- /**
- * Test if blank metadata file was added to the kickedOutPaths.
- */
- public void testKickoutBlankMetadata()
- throws DiscovererException
- {
- discoverer.discoverMetadata( repository, TEST_OPERATION, null );
- Iterator iter = discoverer.getKickedOutPathsIterator();
- boolean found = false;
- while ( iter.hasNext() && !found )
- {
- DiscovererPath dPath = (DiscovererPath) iter.next();
- String dir = dPath.getPath();
-
- String normalizedDir = dir.replace( '\\', '/' );
- if ( "org/apache/maven/some-ejb/1.0/maven-metadata.xml".equals( normalizedDir ) )
- {
- found = true;
- assertTrue( "Check reason for kickout", dPath.getComment().matches(
- "Error reading metadata file '(.*)': input contained no data" ) );
- }
- }
- assertTrue( found );
- }
-
- private void removeTimestampMetadata()
- throws IOException
- {
- // remove the metadata that tracks time
- File file = new File( repository.getBasedir(), "maven-metadata.xml" );
- System.gc(); // for Windows
- file.delete();
- assertFalse( file.exists() );
- }
-
- public void testDiscoverMetadata()
- throws DiscovererException
- {
- List metadataPaths = discoverer.discoverMetadata( repository, TEST_OPERATION, null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- metadata =
- new SnapshotArtifactRepositoryMetadata( createArtifact( "org.apache.testgroup", "discovery", "1.0" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- metadata = new GroupRepositoryMetadata( "org.apache.maven" );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
- }
-
- public void testUpdatedInRepository()
- throws ComponentLookupException, DiscovererException, ParseException, IOException
- {
- // Set repository time to 1-1-2000, a time in the distant past so definitely updated
- discoverer.setLastCheckedTime( repository, "update",
- new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" ) );
-
- List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-updated" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- // try again with the updated timestamp
- metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
- }
-
- private boolean containsMetadata( List metadataPaths, RepositoryMetadata metadata )
- {
- for ( Iterator i = metadataPaths.iterator(); i.hasNext(); )
- {
- RepositoryMetadata m = (RepositoryMetadata) i.next();
-
- if ( m.getGroupId().equals( metadata.getGroupId() ) )
- {
- if ( m.getArtifactId() == null && metadata.getArtifactId() == null )
- {
- return true;
- }
- else if ( m.getArtifactId() != null && m.getArtifactId().equals( metadata.getArtifactId() ) )
- {
- return true;
- }
- }
- }
- return false;
- }
-
- public void testNotUpdatedInRepository()
- throws ComponentLookupException, DiscovererException, IOException
- {
- // Set repository time to now, which is after any artifacts, so definitely not updated
- discoverer.setLastCheckedTime( repository, "update", new Date() );
-
- List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-not-updated" ) );
- assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
- }
-
- public void testNotUpdatedInRepositoryForcedDiscovery()
- throws ComponentLookupException, DiscovererException, IOException
- {
- discoverer.resetLastCheckedTime( repository, "update" );
-
- List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-not-updated" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- // try again with the updated timestamp
- metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
- }
-
- public void testNotUpdatedInRepositoryForcedDiscoveryMetadataAlreadyExists()
- throws ComponentLookupException, DiscovererException, IOException
- {
- discoverer.setLastCheckedTime( repository, "update", new Date() );
-
- discoverer.resetLastCheckedTime( repository, "update" );
-
- List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-not-updated" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- // try again with the updated timestamp
- metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
- }
-
- public void testNotUpdatedInRepositoryForcedDiscoveryOtherMetadataAlreadyExists()
- throws ComponentLookupException, DiscovererException, IOException
- {
- discoverer.setLastCheckedTime( repository, "test", new Date() );
-
- discoverer.resetLastCheckedTime( repository, "update" );
-
- List metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-not-updated" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
-
- // try again with the updated timestamp
- metadataPaths = discoverer.discoverMetadata( repository, "update", null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- assertFalse( "Check not included", containsMetadata( metadataPaths, metadata ) );
- }
-
- public void testNoRepositoryMetadata()
- throws ComponentLookupException, DiscovererException, ParseException, IOException
- {
- removeTimestampMetadata();
-
- // should find all
- List metadataPaths = discoverer.discoverMetadata( repository, TEST_OPERATION, null );
- assertNotNull( "Check metadata not null", metadataPaths );
-
- RepositoryMetadata metadata =
- new ArtifactRepositoryMetadata( createArtifact( "org.apache.maven.update", "test-updated" ) );
- assertTrue( "Check included", containsMetadata( metadataPaths, metadata ) );
- }
-
- protected Artifact createArtifact( String groupId, String artifactId )
- {
- return createArtifact( groupId, artifactId, "1.0" );
- }
-
- private Artifact createArtifact( String groupId, String artifactId, String version )
- {
- return factory.createArtifact( groupId, artifactId, version, null, "jar" );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.discovery;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Test the legacy artifact discoverer.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- */
-public class LegacyArtifactDiscovererTest
- extends AbstractArtifactDiscovererTest
-{
- protected String getLayout()
- {
- return "legacy";
- }
-
- protected File getRepositoryFile()
- {
- return getTestFile( "src/test/legacy-repository" );
- }
-
- public void testDefaultExcludes()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( path.indexOf( ".svn" ) >= 0 )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not .svn", a.getFile().getPath().indexOf( ".svn" ) >= 0 );
- }
- }
-
- public void testStandardExcludes()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "KEYS".equals( path ) )
- {
- found = true;
- assertEquals( "Check comment", "Artifact was in the specified list of exclusions", dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not KEYS", "KEYS".equals( a.getFile().getName() ) );
- }
- }
-
- public void testBlacklistedExclude()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, "javax.sql/**", false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getExcludedPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "javax.sql/jars/jdbc-2.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check comment is about blacklisting", "Artifact was in the specified list of exclusions",
- dPath.getComment() );
- }
- }
- assertTrue( "Check exclusion was found", found );
-
- assertFalse( "Check jdbc not included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- }
-
- public void testKickoutWithShortPath()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Path does not match a legacy repository path for an artifact", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithLongPath()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/1.0/invalid-1.0.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout",
- "Path does not match a legacy repository path for an artifact", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.jar", "invalid-1.0.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithInvalidType()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/foo/invalid-1.0.foo".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path artifact type does not corresspond to an artifact type",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not invalid-1.0.foo", "invalid-1.0.foo".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoExtension()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/no-extension".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename does not have an extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'no-extension'", "no-extension".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithWrongExtension()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/invalid-1.0.rar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path type does not match the extension",
- dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid-1.0.rar'", "invalid-1.0.rar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testKickoutWithNoVersion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
- boolean found = false;
- for ( Iterator i = discoverer.getKickedOutPathsIterator(); i.hasNext() && !found; )
- {
- DiscovererPath dPath = (DiscovererPath) i.next();
-
- String path = dPath.getPath();
-
- if ( "invalid/jars/invalid.jar".equals( path.replace( '\\', '/' ) ) )
- {
- found = true;
- assertEquals( "Check reason for kickout", "Path filename version is empty", dPath.getComment() );
- }
- }
- assertTrue( "Check kickout was found", found );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact a = (Artifact) i.next();
- assertFalse( "Check not 'invalid.jar'", "invalid.jar".equals( a.getFile().getName() ) );
- }
- }
-
- public void testInclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0" ) ) );
- }
-
- public void testTextualVersion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "UNKNOWN" ) ) );
- }
-
- public void testArtifactWithClassifier()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included",
- artifacts.contains( createArtifact( "org.apache.maven", "some-ejb", "1.0", "jar", "client" ) ) );
- }
-
- public void testJavaSourcesInclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains(
- createArtifact( "org.apache.maven", "testing", "1.0", "java-source", "sources" ) ) );
- }
-
- public void testDistributionInclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check zip included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-zip" ) ) );
-
- assertTrue( "Check tar.gz included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0", "distribution-tgz" ) ) );
- }
-
- public void testSnapshotInclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertTrue( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testSnapshotExclusion()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, false );
- assertNotNull( "Check artifacts not null", artifacts );
-
- assertTrue( "Check normal included", artifacts.contains( createArtifact( "javax.sql", "jdbc", "2.0" ) ) );
- assertFalse( "Check snapshot included",
- artifacts.contains( createArtifact( "org.apache.maven", "testing", "1.0-20050611.112233-1" ) ) );
- }
-
- public void testFileSet()
- throws DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check file is set", artifact.getFile() );
- }
- }
-
- public void testRepositorySet()
- throws MalformedURLException, DiscovererException
- {
- List artifacts = discoverer.discoverArtifacts( repository, TEST_OPERATION, null, true );
- assertNotNull( "Check artifacts not null", artifacts );
-
- String url = repository.getUrl();
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
- assertNotNull( "Check repository set", artifact.getRepository() );
- assertEquals( "Check repository url is correct", url, artifact.getRepository().getUrl() );
- }
- }
-
- public void testWrongArtifactPackaging()
- throws ComponentLookupException, DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "org.apache.maven.test/jars/artifactId-1.0.jar.md5" );
-
- fail( "Artifact should be null for wrong package extension" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoArtifactId()
- throws DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "groupId/jars/-1.0.jar" );
-
- fail( "Artifact should be null when artifactId is missing" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
-
- try
- {
- discoverer.buildArtifact( "groupId/jars/1.0.jar" );
-
- fail( "Artifact should be null when artifactId is missing" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testNoType()
- throws ComponentLookupException, DiscovererException
- {
- try
- {
- discoverer.buildArtifact( "invalid/invalid/1/invalid-1" );
-
- fail( "Artifact should be null for no type" );
- }
- catch ( DiscovererException e )
- {
- // excellent
- }
- }
-
- public void testSnapshot()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org.apache.maven.test/jars/maven-model-1.0-SNAPSHOT.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-SNAPSHOT" ), artifact );
- }
-
- public void testFinal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "org.apache.maven.test/jars/maven-model-1.0-final-20060606.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "org.apache.maven.test", "maven-model", "1.0-final-20060606" ), artifact );
- }
-
- public void testNormal()
- throws ComponentLookupException, DiscovererException
- {
- String testPath = "javax.sql/jars/jdbc-2.0.jar";
-
- Artifact artifact = discoverer.buildArtifact( testPath );
-
- assertEquals( createArtifact( "javax.sql", "jdbc", "2.0" ), artifact );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.indexing;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.indexing.query.Query;
+
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Maintain an artifact index on the repository.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryArtifactIndex
+{
+    /**
+     * Indexes the artifacts found within the specified list of index records. If the artifacts are already in the
+     * repository they are updated.
+     *
+     * @param records the artifacts to index (a collection of
+     *                {@link org.apache.maven.archiva.indexing.record.RepositoryIndexRecord})
+     * @throws RepositoryIndexException if there is a problem indexing the records
+     */
+    void indexRecords( Collection records )
+        throws RepositoryIndexException;
+
+    /**
+     * Search the index based on the search criteria specified. Returns a list of index records.
+     *
+     * @param query The query that contains the search criteria
+     * @return the index records found
+     * @throws RepositoryIndexSearchException if there is a problem searching
+     * @todo should it return "SearchResult" instances that contain the index record and other search data (like score?)
+     */
+    List search( Query query )
+        throws RepositoryIndexSearchException;
+
+    /**
+     * Check if the index already exists.
+     *
+     * @return true if the index already exists
+     * @throws RepositoryIndexException if the index location is not valid
+     */
+    boolean exists()
+        throws RepositoryIndexException;
+
+    /**
+     * Delete records from the index. Requests to delete records that are not present in the index
+     * are silently ignored.
+     *
+     * @param records the records to delete
+     * @throws RepositoryIndexException if there is a problem removing the record
+     */
+    void deleteRecords( Collection records )
+        throws RepositoryIndexException;
+}
--- /dev/null
+package org.apache.maven.archiva.indexing;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+
+/**
+ * Obtain an index instance.
+ *
+ * <p>Two index layouts are available: the "standard" index, which records the full set of artifact
+ * fields, and the "minimal" index, which records a reduced set (filename, timestamp, size,
+ * checksum, classes).</p>
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryArtifactIndexFactory
+{
+    /**
+     * Plexus role.
+     */
+    String ROLE = RepositoryArtifactIndexFactory.class.getName();
+
+    /**
+     * Method to create an instance of the standard index.
+     *
+     * @param indexPath the path where the index will be created/updated
+     * @return the index instance
+     */
+    RepositoryArtifactIndex createStandardIndex( File indexPath );
+
+    /**
+     * Method to create an instance of the minimal index.
+     *
+     * @param indexPath the path where the index will be created/updated
+     * @return the index instance
+     */
+    RepositoryArtifactIndex createMinimalIndex( File indexPath );
+}
--- /dev/null
+package org.apache.maven.archiva.indexing;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception raised when an artifact index cannot be created or updated.
+ *
+ * @author Edwin Punzalan
+ */
+public class RepositoryIndexException
+    extends Exception
+{
+    /**
+     * Constructs an exception with a detail message only.
+     *
+     * @param message the detail message
+     */
+    public RepositoryIndexException( String message )
+    {
+        super( message );
+    }
+
+    /**
+     * Constructs an exception with a detail message and an underlying cause.
+     *
+     * @param message the detail message
+     * @param cause the underlying cause
+     */
+    public RepositoryIndexException( String message, Throwable cause )
+    {
+        super( message, cause );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception raised when searching the artifact index fails.
+ *
+ * @author Brett Porter
+ */
+public class RepositoryIndexSearchException
+    extends Exception
+{
+    /**
+     * Constructs an exception with a detail message and an underlying cause.
+     *
+     * @param message the detail message
+     * @param cause the underlying cause
+     */
+    public RepositoryIndexSearchException( String message, Throwable cause )
+    {
+        super( message, cause );
+    }
+
+    /**
+     * Constructs an exception with a detail message only, matching the constructors offered by
+     * {@link RepositoryIndexException}.
+     *
+     * @param message the detail message
+     */
+    public RepositoryIndexSearchException( String message )
+    {
+        super( message );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.document.Document;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecord;
+
+import java.text.ParseException;
+
+/**
+ * Converts repository records to Lucene documents.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface LuceneIndexRecordConverter
+{
+    /**
+     * Convert an index record to a Lucene document.
+     *
+     * @param record the record
+     * @return the document
+     */
+    Document convert( RepositoryIndexRecord record );
+
+    /**
+     * Convert a Lucene document to an index record.
+     *
+     * <p>NOTE(review): fields absent from the document come back as null from
+     * {@code Document.get()} — implementations presumably must tolerate that; confirm against
+     * each implementation.</p>
+     *
+     * @param document the document
+     * @return the record
+     * @throws java.text.ParseException if there is a problem parsing a field (specifically, dates)
+     */
+    RepositoryIndexRecord convert( Document document )
+        throws ParseException;
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.document.DateTools;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumberTools;
+import org.apache.maven.archiva.indexing.record.MinimalArtifactIndexRecord;
+import org.apache.maven.archiva.indexing.record.MinimalIndexRecordFields;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecord;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.text.ParseException;
+import java.util.Arrays;
+
+/**
+ * Convert the minimal index record to a Lucene document.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneMinimalIndexRecordConverter
+    implements LuceneIndexRecordConverter
+{
+    /**
+     * Converts a minimal artifact record into a Lucene document; null-valued fields are skipped.
+     *
+     * @param record the record to convert (must be a {@link MinimalArtifactIndexRecord})
+     * @return the Lucene document
+     */
+    public Document convert( RepositoryIndexRecord record )
+    {
+        MinimalArtifactIndexRecord rec = (MinimalArtifactIndexRecord) record;
+
+        Document document = new Document();
+        addTokenizedField( document, MinimalIndexRecordFields.FILENAME, rec.getFilename() );
+        addUntokenizedField( document, MinimalIndexRecordFields.LAST_MODIFIED,
+                             DateTools.timeToString( rec.getLastModified(), DateTools.Resolution.SECOND ) );
+        addUntokenizedField( document, MinimalIndexRecordFields.FILE_SIZE, NumberTools.longToString( rec.getSize() ) );
+        addUntokenizedField( document, MinimalIndexRecordFields.MD5, rec.getMd5Checksum() );
+        // Guard against a null class list, for consistency with LuceneStandardIndexRecordConverter
+        // (StringUtils.join would otherwise throw an NPE).
+        if ( rec.getClasses() != null )
+        {
+            addTokenizedField( document, MinimalIndexRecordFields.CLASSES,
+                               StringUtils.join( rec.getClasses().iterator(), "\n" ) );
+        }
+
+        return document;
+    }
+
+    /**
+     * Reconstructs a minimal artifact record from a Lucene document.
+     *
+     * @param document the document to convert
+     * @return the reconstructed record
+     * @throws ParseException if the stored last-modified date cannot be parsed
+     */
+    public RepositoryIndexRecord convert( Document document )
+        throws ParseException
+    {
+        MinimalArtifactIndexRecord record = new MinimalArtifactIndexRecord();
+
+        record.setFilename( document.get( MinimalIndexRecordFields.FILENAME ) );
+        record.setLastModified( DateTools.stringToTime( document.get( MinimalIndexRecordFields.LAST_MODIFIED ) ) );
+        record.setSize( NumberTools.stringToLong( document.get( MinimalIndexRecordFields.FILE_SIZE ) ) );
+        record.setMd5Checksum( document.get( MinimalIndexRecordFields.MD5 ) );
+        // The classes field may be absent (it is only written when the record had classes);
+        // Document.get() then returns null and split() would throw an NPE.
+        String classes = document.get( MinimalIndexRecordFields.CLASSES );
+        if ( classes != null )
+        {
+            record.setClasses( Arrays.asList( classes.split( "\n" ) ) );
+        }
+
+        return record;
+    }
+
+    // Stores a field verbatim as a single token; skipped when the value is null.
+    private static void addUntokenizedField( Document document, String name, String value )
+    {
+        if ( value != null )
+        {
+            document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
+        }
+    }
+
+    // Stores and tokenizes a field for full-text search; skipped when the value is null.
+    private static void addTokenizedField( Document document, String name, String value )
+    {
+        if ( value != null )
+        {
+            document.add( new Field( name, value, Field.Store.YES, Field.Index.TOKENIZED ) );
+        }
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.indexing.query.Query;
+
+/**
+ * A holder for a lucene query to pass to the indexer API.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneQuery
+    implements Query
+{
+    /**
+     * The wrapped native Lucene query.
+     */
+    private final org.apache.lucene.search.Query query;
+
+    /**
+     * Wraps a native Lucene query so it can be passed through the indexer's {@code Query} API.
+     *
+     * @param query the Lucene query to wrap
+     */
+    public LuceneQuery( org.apache.lucene.search.Query query )
+    {
+        this.query = query;
+    }
+
+    /**
+     * The wrapped Lucene query; package-private so only the Lucene index implementation can
+     * unwrap it.
+     *
+     * @return the native Lucene query
+     */
+    org.apache.lucene.search.Query getLuceneQuery()
+    {
+        return query;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Hits;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.archiva.indexing.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexing.query.Query;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecord;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Lucene implementation of a repository index.
+ *
+ * <p>Records are keyed by an untokenized primary-key field so that re-indexing a record replaces
+ * its previous document (delete then add).</p>
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneRepositoryArtifactIndex
+    implements RepositoryArtifactIndex
+{
+    /**
+     * The location of the index on the file system.
+     */
+    private File indexLocation;
+
+    /**
+     * Convert repository records to Lucene documents.
+     */
+    private LuceneIndexRecordConverter converter;
+
+    /**
+     * Name of the unstored, untokenized field holding each record's primary key; used to delete
+     * existing documents when records are re-indexed.
+     */
+    private static final String FLD_PK = "pk";
+
+    /**
+     * Creates an index over the given location using the given record/document converter.
+     *
+     * @param indexPath the index directory
+     * @param converter converts records to/from Lucene documents
+     */
+    public LuceneRepositoryArtifactIndex( File indexPath, LuceneIndexRecordConverter converter )
+    {
+        this.indexLocation = indexPath;
+        this.converter = converter;
+    }
+
+    /**
+     * Indexes the given records, replacing any documents already stored under the same primary key.
+     *
+     * @param records the records to index
+     * @throws RepositoryIndexException if the index cannot be read or written
+     */
+    public void indexRecords( Collection records )
+        throws RepositoryIndexException
+    {
+        // Lucene has no in-place update: remove any existing documents with the same
+        // primary keys first, then add the new versions.
+        deleteRecords( records );
+
+        addRecords( records );
+    }
+
+    // Adds each non-null record as a new document; creates the index if it does not exist yet.
+    private void addRecords( Collection records )
+        throws RepositoryIndexException
+    {
+        IndexWriter indexWriter;
+        try
+        {
+            // third argument: create a fresh index when none exists at the location
+            indexWriter = new IndexWriter( indexLocation, getAnalyzer(), !exists() );
+        }
+        catch ( IOException e )
+        {
+            throw new RepositoryIndexException( "Unable to open index", e );
+        }
+
+        try
+        {
+            for ( Iterator i = records.iterator(); i.hasNext(); )
+            {
+                RepositoryIndexRecord record = (RepositoryIndexRecord) i.next();
+
+                if ( record != null )
+                {
+                    Document document = converter.convert( record );
+                    // attach the primary key so deleteRecords() can later find this document
+                    document.add(
+                        new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+
+                    indexWriter.addDocument( document );
+                }
+            }
+
+            indexWriter.optimize();
+        }
+        catch ( IOException e )
+        {
+            throw new RepositoryIndexException( "Failed to add an index document", e );
+        }
+        finally
+        {
+            // NOTE(review): close() can itself throw from this finally block, which would mask an
+            // exception raised in the try block above — consider a quiet close here instead.
+            close( indexWriter );
+        }
+    }
+
+    // Closes the writer, translating any IOException into a RepositoryIndexException.
+    private void close( IndexWriter indexWriter )
+        throws RepositoryIndexException
+    {
+        try
+        {
+            if ( indexWriter != null )
+            {
+                indexWriter.close();
+            }
+        }
+        catch ( IOException e )
+        {
+            throw new RepositoryIndexException( e.getMessage(), e );
+        }
+    }
+
+    // Analyzer used when writing documents; must stay compatible with how queries are built.
+    private Analyzer getAnalyzer()
+    {
+        // TODO: investigate why changed in original! Probably for MD5 and number querying.
+        return new StandardAnalyzer();
+    }
+
+    /**
+     * Deletes the documents whose primary keys match the given records. A no-op when the index
+     * does not exist; records without matching documents are ignored by Lucene.
+     *
+     * @param records the records to delete
+     * @throws RepositoryIndexException if the index cannot be read or written
+     */
+    public void deleteRecords( Collection records )
+        throws RepositoryIndexException
+    {
+        if ( exists() )
+        {
+            IndexReader indexReader = null;
+            try
+            {
+                indexReader = IndexReader.open( indexLocation );
+
+                for ( Iterator artifacts = records.iterator(); artifacts.hasNext(); )
+                {
+                    RepositoryIndexRecord record = (RepositoryIndexRecord) artifacts.next();
+
+                    if ( record != null )
+                    {
+                        // delete by the same primary-key field that addRecords() writes
+                        Term term = new Term( FLD_PK, record.getPrimaryKey() );
+
+                        indexReader.deleteDocuments( term );
+                    }
+                }
+            }
+            catch ( IOException e )
+            {
+                throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
+            }
+            finally
+            {
+                if ( indexReader != null )
+                {
+                    closeQuietly( indexReader );
+                }
+            }
+        }
+    }
+
+    /**
+     * Checks whether a Lucene index exists at the configured location.
+     *
+     * @return true if an index exists; false if the location is absent or an empty directory
+     * @throws RepositoryIndexException if the location exists but is not a usable index directory
+     */
+    public boolean exists()
+        throws RepositoryIndexException
+    {
+        if ( IndexReader.indexExists( indexLocation ) )
+        {
+            return true;
+        }
+        else if ( !indexLocation.exists() )
+        {
+            return false;
+        }
+        else if ( indexLocation.isDirectory() )
+        {
+            // NOTE(review): tolerates a single stray file (presumably a lock file) in an otherwise
+            // empty directory — confirm this is intentional. listFiles() can also return null on
+            // an I/O error, which would NPE here.
+            if ( indexLocation.listFiles().length > 1 )
+            {
+                throw new RepositoryIndexException( indexLocation + " is not a valid index directory." );
+            }
+            else
+            {
+                return false;
+            }
+        }
+        else
+        {
+            throw new RepositoryIndexException( indexLocation + " is not a directory." );
+        }
+    }
+
+    /**
+     * Runs a Lucene query against the index and converts each hit back into an index record.
+     *
+     * @param query must be a {@link LuceneQuery}; other implementations fail the cast
+     * @return the matching records, in hit order
+     * @throws RepositoryIndexSearchException if the index cannot be opened, searched, or a stored
+     *                                        field cannot be parsed back into a record
+     */
+    public List search( Query query )
+        throws RepositoryIndexSearchException
+    {
+        LuceneQuery lQuery = (LuceneQuery) query;
+
+        org.apache.lucene.search.Query luceneQuery = lQuery.getLuceneQuery();
+
+        // a searcher is opened per call, so each search sees the index as of now
+        IndexSearcher searcher;
+        try
+        {
+            searcher = new IndexSearcher( indexLocation.getAbsolutePath() );
+        }
+        catch ( IOException e )
+        {
+            throw new RepositoryIndexSearchException( "Unable to open index: " + e.getMessage(), e );
+        }
+
+        List records = new ArrayList();
+        try
+        {
+            Hits hits = searcher.search( luceneQuery );
+            for ( int i = 0; i < hits.length(); i++ )
+            {
+                Document doc = hits.doc( i );
+
+                records.add( converter.convert( doc ) );
+            }
+        }
+        catch ( IOException e )
+        {
+            throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
+        }
+        catch ( ParseException e )
+        {
+            // thrown by the converter when a stored field (e.g. a date) is malformed
+            throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
+        }
+        finally
+        {
+            closeQuietly( searcher );
+        }
+
+        return records;
+    }
+
+    // Closes the searcher, swallowing any IOException (best-effort cleanup).
+    private static void closeQuietly( IndexSearcher searcher )
+    {
+        try
+        {
+            if ( searcher != null )
+            {
+                searcher.close();
+            }
+        }
+        catch ( IOException e )
+        {
+            // ignore
+        }
+    }
+
+    // Closes the reader, swallowing any IOException (best-effort cleanup).
+    private static void closeQuietly( IndexReader reader )
+    {
+        try
+        {
+            if ( reader != null )
+            {
+                reader.close();
+            }
+        }
+        catch ( IOException e )
+        {
+            // ignore
+        }
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+
+import java.io.File;
+
+/**
+ * Factory for Lucene artifact index instances.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component role="org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory" role-hint="lucene"
+ */
+public class LuceneRepositoryArtifactIndexFactory
+    implements RepositoryArtifactIndexFactory
+{
+    /**
+     * Builds a Lucene-backed index over the full (standard) record format.
+     *
+     * @param indexPath the index directory
+     * @return the index instance
+     */
+    public RepositoryArtifactIndex createStandardIndex( File indexPath )
+    {
+        LuceneIndexRecordConverter converter = new LuceneStandardIndexRecordConverter();
+        return new LuceneRepositoryArtifactIndex( indexPath, converter );
+    }
+
+    /**
+     * Builds a Lucene-backed index over the minimal record format.
+     *
+     * @param indexPath the index directory
+     * @return the index instance
+     */
+    public RepositoryArtifactIndex createMinimalIndex( File indexPath )
+    {
+        LuceneIndexRecordConverter converter = new LuceneMinimalIndexRecordConverter();
+        return new LuceneRepositoryArtifactIndex( indexPath, converter );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.document.DateTools;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumberTools;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecord;
+import org.apache.maven.archiva.indexing.record.StandardArtifactIndexRecord;
+import org.apache.maven.archiva.indexing.record.StandardIndexRecordFields;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.text.ParseException;
+import java.util.Arrays;
+
+/**
+ * Convert the standard index record to a Lucene document.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneStandardIndexRecordConverter
+    implements LuceneIndexRecordConverter
+{
+    /**
+     * Converts a standard artifact record to a Lucene document; null-valued fields are skipped.
+     *
+     * @param record the record to convert (must be a {@link StandardArtifactIndexRecord})
+     * @return the Lucene document
+     */
+    public Document convert( RepositoryIndexRecord record )
+    {
+        StandardArtifactIndexRecord rec = (StandardArtifactIndexRecord) record;
+
+        Document document = new Document();
+        addTokenizedField( document, StandardIndexRecordFields.FILENAME, rec.getFilename() );
+        // groupId/artifactId/version are indexed twice: tokenized (stored) for free-text
+        // search and untokenized (unstored) for exact matching.
+        addTokenizedField( document, StandardIndexRecordFields.GROUPID, rec.getGroupId() );
+        addExactField( document, StandardIndexRecordFields.GROUPID_EXACT, rec.getGroupId() );
+        addTokenizedField( document, StandardIndexRecordFields.ARTIFACTID, rec.getArtifactId() );
+        addExactField( document, StandardIndexRecordFields.ARTIFACTID_EXACT, rec.getArtifactId() );
+        addTokenizedField( document, StandardIndexRecordFields.VERSION, rec.getVersion() );
+        addExactField( document, StandardIndexRecordFields.VERSION_EXACT, rec.getVersion() );
+        addTokenizedField( document, StandardIndexRecordFields.BASE_VERSION, rec.getBaseVersion() );
+        addExactField( document, StandardIndexRecordFields.BASE_VERSION_EXACT, rec.getBaseVersion() );
+        addUntokenizedField( document, StandardIndexRecordFields.TYPE, rec.getType() );
+        addTokenizedField( document, StandardIndexRecordFields.CLASSIFIER, rec.getClassifier() );
+        addUntokenizedField( document, StandardIndexRecordFields.PACKAGING, rec.getPackaging() );
+        addUntokenizedField( document, StandardIndexRecordFields.REPOSITORY, rec.getRepository() );
+        // dates and longs are encoded as lexicographically sortable strings
+        addUntokenizedField( document, StandardIndexRecordFields.LAST_MODIFIED,
+                             DateTools.timeToString( rec.getLastModified(), DateTools.Resolution.SECOND ) );
+        addUntokenizedField( document, StandardIndexRecordFields.FILE_SIZE, NumberTools.longToString( rec.getSize() ) );
+        addUntokenizedField( document, StandardIndexRecordFields.MD5, rec.getMd5Checksum() );
+        addUntokenizedField( document, StandardIndexRecordFields.SHA1, rec.getSha1Checksum() );
+        // class and file lists are joined on newlines so they round-trip through a single field
+        if ( rec.getClasses() != null )
+        {
+            addTokenizedField( document, StandardIndexRecordFields.CLASSES,
+                               StringUtils.join( rec.getClasses().iterator(), "\n" ) );
+        }
+        if ( rec.getFiles() != null )
+        {
+            addTokenizedField( document, StandardIndexRecordFields.FILES,
+                               StringUtils.join( rec.getFiles().iterator(), "\n" ) );
+        }
+        addUntokenizedField( document, StandardIndexRecordFields.PLUGIN_PREFIX, rec.getPluginPrefix() );
+        addUntokenizedField( document, StandardIndexRecordFields.INCEPTION_YEAR, rec.getInceptionYear() );
+        addTokenizedField( document, StandardIndexRecordFields.PROJECT_NAME, rec.getProjectName() );
+        addTokenizedField( document, StandardIndexRecordFields.PROJECT_DESCRIPTION, rec.getProjectDescription() );
+/* TODO: add later
+        document.add( Field.Keyword( StandardIndexRecordFields.FLD_LICENSE_URLS, "" ) );
+        document.add( Field.Keyword( StandardIndexRecordFields.FLD_DEPENDENCIES, "" ) );
+        document.add( Field.Keyword( StandardIndexRecordFields.FLD_PLUGINS_REPORT, "" ) );
+        document.add( Field.Keyword( StandardIndexRecordFields.FLD_PLUGINS_BUILD, "" ) );
+*/
+
+        return document;
+    }
+
+    /**
+     * Reconstructs a standard artifact record from a Lucene document. The *_EXACT fields are not
+     * stored and therefore are not read back here.
+     *
+     * @param document the document to convert
+     * @return the reconstructed record
+     * @throws ParseException if the stored last-modified date cannot be parsed
+     */
+    public RepositoryIndexRecord convert( Document document )
+        throws ParseException
+    {
+        StandardArtifactIndexRecord record = new StandardArtifactIndexRecord();
+
+        record.setFilename( document.get( StandardIndexRecordFields.FILENAME ) );
+        record.setGroupId( document.get( StandardIndexRecordFields.GROUPID ) );
+        record.setArtifactId( document.get( StandardIndexRecordFields.ARTIFACTID ) );
+        record.setVersion( document.get( StandardIndexRecordFields.VERSION ) );
+        record.setBaseVersion( document.get( StandardIndexRecordFields.BASE_VERSION ) );
+        record.setType( document.get( StandardIndexRecordFields.TYPE ) );
+        record.setClassifier( document.get( StandardIndexRecordFields.CLASSIFIER ) );
+        record.setPackaging( document.get( StandardIndexRecordFields.PACKAGING ) );
+        record.setRepository( document.get( StandardIndexRecordFields.REPOSITORY ) );
+        record.setLastModified( DateTools.stringToTime( document.get( StandardIndexRecordFields.LAST_MODIFIED ) ) );
+        record.setSize( NumberTools.stringToLong( document.get( StandardIndexRecordFields.FILE_SIZE ) ) );
+        record.setMd5Checksum( document.get( StandardIndexRecordFields.MD5 ) );
+        record.setSha1Checksum( document.get( StandardIndexRecordFields.SHA1 ) );
+        // the list fields may be absent; only split when present to avoid an NPE
+        String classes = document.get( StandardIndexRecordFields.CLASSES );
+        if ( classes != null )
+        {
+            record.setClasses( Arrays.asList( classes.split( "\n" ) ) );
+        }
+        String files = document.get( StandardIndexRecordFields.FILES );
+        if ( files != null )
+        {
+            record.setFiles( Arrays.asList( files.split( "\n" ) ) );
+        }
+        record.setPluginPrefix( document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
+        record.setInceptionYear( document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
+        record.setProjectName( document.get( StandardIndexRecordFields.PROJECT_NAME ) );
+        record.setProjectDescription( document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
+
+        return record;
+    }
+
+    // Stores a field verbatim as a single token; skipped when the value is null.
+    private static void addUntokenizedField( Document document, String name, String value )
+    {
+        if ( value != null )
+        {
+            document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
+        }
+    }
+
+    // Indexes a field untokenized without storing it, for exact-match queries; skipped when null.
+    private static void addExactField( Document document, String name, String value )
+    {
+        if ( value != null )
+        {
+            document.add( new Field( name, value, Field.Store.NO, Field.Index.UN_TOKENIZED ) );
+        }
+    }
+
+    // Stores and tokenizes a field for full-text search; skipped when the value is null.
+    private static void addTokenizedField( Document document, String name, String value )
+    {
+        if ( value != null )
+        {
+            document.add( new Field( name, value, Field.Store.YES, Field.Index.TOKENIZED ) );
+        }
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.query;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A query built from other queries and/or single terms, each combined as a required (AND),
+ * optional (OR) or prohibited (NOT) clause.
+ *
+ * @author Edwin Punzalan
+ */
+public class CompoundQuery
+    implements Query
+{
+    /**
+     * The accumulated clauses, in insertion order (a list of {@link CompoundQueryTerm}).
+     */
+    private final List compoundQueryTerms = new ArrayList();
+
+    /**
+     * Appends a required subquery to this query.
+     *
+     * @param query the subquery to append
+     */
+    public void and( Query query )
+    {
+        this.compoundQueryTerms.add( CompoundQueryTerm.and( query ) );
+    }
+
+    /**
+     * Appends an optional subquery to this query.
+     *
+     * @param query the subquery to append
+     */
+    public void or( Query query )
+    {
+        this.compoundQueryTerms.add( CompoundQueryTerm.or( query ) );
+    }
+
+    /**
+     * Appends a prohibited subquery to this query.
+     *
+     * @param query the subquery to append
+     */
+    public void not( Query query )
+    {
+        this.compoundQueryTerms.add( CompoundQueryTerm.not( query ) );
+    }
+
+    /**
+     * Appends a required term to this query, wrapped as a single-term subquery.
+     *
+     * @param term the term to append
+     */
+    public void and( QueryTerm term )
+    {
+        this.compoundQueryTerms.add( CompoundQueryTerm.and( new SingleTermQuery( term ) ) );
+    }
+
+    /**
+     * Appends an optional term to this query, wrapped as a single-term subquery.
+     *
+     * @param term the term to append
+     */
+    public void or( QueryTerm term )
+    {
+        this.compoundQueryTerms.add( CompoundQueryTerm.or( new SingleTermQuery( term ) ) );
+    }
+
+    /**
+     * Appends a prohibited term to this query, wrapped as a single-term subquery.
+     *
+     * @param term the term to append
+     */
+    public void not( QueryTerm term )
+    {
+        this.compoundQueryTerms.add( CompoundQueryTerm.not( new SingleTermQuery( term ) ) );
+    }
+
+    /**
+     * Returns every clause added to this query so far.
+     *
+     * @return the list of {@link CompoundQueryTerm} clauses
+     */
+    public List getCompoundQueryTerms()
+    {
+        return compoundQueryTerms;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.query;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * One clause of a {@link CompoundQuery}: a subquery together with the flags saying whether it is
+ * required (AND), optional (OR) or prohibited (NOT).
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class CompoundQueryTerm
+{
+    /**
+     * The subquery this clause wraps.
+     */
+    private final Query query;
+
+    /**
+     * True when the subquery must match (an AND clause).
+     */
+    private final boolean required;
+
+    /**
+     * True when the subquery must not match (a NOT clause).
+     */
+    private final boolean prohibited;
+
+    /**
+     * Creates a required (AND) clause around the given subquery.
+     *
+     * @param query the subquery to wrap
+     * @return the new clause
+     */
+    static CompoundQueryTerm and( Query query )
+    {
+        return new CompoundQueryTerm( query, true, false );
+    }
+
+    /**
+     * Creates an optional (OR) clause around the given subquery.
+     *
+     * @param query the subquery to wrap
+     * @return the new clause
+     */
+    static CompoundQueryTerm or( Query query )
+    {
+        return new CompoundQueryTerm( query, false, false );
+    }
+
+    /**
+     * Creates a prohibited (NOT) clause around the given subquery.
+     *
+     * @param query the subquery to wrap
+     * @return the new clause
+     */
+    static CompoundQueryTerm not( Query query )
+    {
+        return new CompoundQueryTerm( query, false, true );
+    }
+
+    /**
+     * Private constructor; callers use the static factory methods above.
+     *
+     * @param query the subquery to wrap
+     * @param required whether the subquery must match
+     * @param prohibited whether the subquery must not match
+     */
+    private CompoundQueryTerm( Query query, boolean required, boolean prohibited )
+    {
+        this.query = query;
+        this.required = required;
+        this.prohibited = prohibited;
+    }
+
+    /**
+     * Whether this clause is a search requirement.
+     *
+     * @return true for an AND clause
+     */
+    public boolean isRequired()
+    {
+        return required;
+    }
+
+    /**
+     * Whether this clause is prohibited from the search result.
+     *
+     * @return true for a NOT clause
+     */
+    public boolean isProhibited()
+    {
+        return prohibited;
+    }
+
+    /**
+     * The subquery to execute.
+     *
+     * @return the query
+     */
+    public Query getQuery()
+    {
+        return query;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.query;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Interface to label the query classes
+ *
+ * @author Edwin Punzalan
+ */
+public interface Query
+{
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.query;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Class to hold a single field search condition: the index field to search
+ * and the value it must match. Instances are immutable.
+ *
+ * @author Edwin Punzalan
+ */
+public class QueryTerm
+{
+ /**
+ * The name of the index field to search.
+ */
+ private final String field;
+
+ /**
+ * The value required in the index field.
+ */
+ private final String value;
+
+ /**
+ * Class constructor
+ *
+ * @param field the index field to search
+ * @param value the index value requirement
+ */
+ public QueryTerm( String field, String value )
+ {
+ this.field = field;
+ this.value = value;
+ }
+
+ /**
+ * Method to retrieve the name of the index field searched
+ *
+ * @return the name of the index field
+ */
+ public String getField()
+ {
+ return field;
+ }
+
+ /**
+ * Method to retrieve the value used in searching the index field
+ *
+ * @return the value to correspond to the index field
+ */
+ public String getValue()
+ {
+ return value;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.query;\r
+\r
+/*\r
+ * Copyright 2005-2006 The Apache Software Foundation.\r
+ *\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ * http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ */\r
+\r
+/**\r
+ * Query object that handles range queries (presently used for dates).\r
+ *\r
+ * @author Maria Odea Ching\r
+ * @author Brett Porter\r
+ */\r
+public class RangeQuery\r
+ implements Query\r
+{\r
+ /**\r
+ * Whether values equal to the boundaries are included in the query results.\r
+ */\r
+ private final boolean inclusive;\r
+\r
+ /**\r
+ * The lower bound.\r
+ */\r
+ private final QueryTerm begin;\r
+\r
+ /**\r
+ * The upper bound.\r
+ */\r
+ private final QueryTerm end;\r
+\r
+ /**\r
+ * Constructor.\r
+ *\r
+ * @param begin the lower bound\r
+ * @param end the upper bound\r
+ * @param inclusive whether to include the boundaries in the query\r
+ */\r
+ private RangeQuery( QueryTerm begin, QueryTerm end, boolean inclusive )\r
+ {\r
+ this.begin = begin;\r
+ this.end = end;\r
+ this.inclusive = inclusive;\r
+ }\r
+\r
+ /**\r
+ * Create an open range, including all results.\r
+ *\r
+ * <p>Both bounds are null, so the inclusive flag is irrelevant here.</p>\r
+ *\r
+ * @return the query object\r
+ */\r
+ public static RangeQuery createOpenRange()\r
+ {\r
+ return new RangeQuery( null, null, false );\r
+ }\r
+\r
+ /**\r
+ * Create a bounded range, excluding the endpoints.\r
+ *\r
+ * @param begin the lower bound value to compare to\r
+ * @param end the upper bound value to compare to\r
+ * @return the query object\r
+ */\r
+ public static RangeQuery createExclusiveRange( QueryTerm begin, QueryTerm end )\r
+ {\r
+ return new RangeQuery( begin, end, false );\r
+ }\r
+\r
+ /**\r
+ * Create a bounded range, including the endpoints.\r
+ *\r
+ * @param begin the lower bound value to compare to\r
+ * @param end the upper bound value to compare to\r
+ * @return the query object\r
+ */\r
+ public static RangeQuery createInclusiveRange( QueryTerm begin, QueryTerm end )\r
+ {\r
+ return new RangeQuery( begin, end, true );\r
+ }\r
+\r
+ /**\r
+ * Create a range that is greater than or equal to a given term.\r
+ *\r
+ * @param begin the value to compare to\r
+ * @return the query object\r
+ */\r
+ public static RangeQuery createGreaterThanOrEqualToRange( QueryTerm begin )\r
+ {\r
+ return new RangeQuery( begin, null, true );\r
+ }\r
+\r
+ /**\r
+ * Create a range that is greater than a given term.\r
+ *\r
+ * @param begin the value to compare to\r
+ * @return the query object\r
+ */\r
+ public static RangeQuery createGreaterThanRange( QueryTerm begin )\r
+ {\r
+ return new RangeQuery( begin, null, false );\r
+ }\r
+\r
+ /**\r
+ * Create a range that is less than or equal to a given term.\r
+ *\r
+ * @param end the value to compare to\r
+ * @return the query object\r
+ */\r
+ public static RangeQuery createLessThanOrEqualToRange( QueryTerm end )\r
+ {\r
+ return new RangeQuery( null, end, true );\r
+ }\r
+\r
+ /**\r
+ * Create a range that is less than a given term.\r
+ *\r
+ * @param end the value to compare to\r
+ * @return the query object\r
+ */\r
+ public static RangeQuery createLessThanRange( QueryTerm end )\r
+ {\r
+ return new RangeQuery( null, end, false );\r
+ }\r
+\r
+ /**\r
+ * Get the lower bound term, or null when the range is unbounded below.\r
+ *\r
+ * @return the lower bound\r
+ */\r
+ public QueryTerm getBegin()\r
+ {\r
+ return begin;\r
+ }\r
+\r
+ /**\r
+ * Get the upper bound term, or null when the range is unbounded above.\r
+ *\r
+ * @return the upper bound\r
+ */\r
+ public QueryTerm getEnd()\r
+ {\r
+ return end;\r
+ }\r
+\r
+ /**\r
+ * Whether values equal to the boundaries are included in the results.\r
+ *\r
+ * @return true for an inclusive range\r
+ */\r
+ public boolean isInclusive()\r
+ {\r
+ return inclusive;\r
+ }\r
+\r
+}\r
--- /dev/null
+package org.apache.maven.archiva.indexing.query;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Query for a single term: delegates a field/value pair to a wrapped
+ * {@link QueryTerm}.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class SingleTermQuery
+ implements Query
+{
+ /**
+ * The single field/value term this query matches.
+ */
+ private final QueryTerm term;
+
+ /**
+ * Constructor.
+ *
+ * @param term the term to query
+ */
+ public SingleTermQuery( QueryTerm term )
+ {
+ this.term = term;
+ }
+
+ /**
+ * Shorthand constructor - create a single term query from a field and value
+ *
+ * @param field the field name
+ * @param value the value to check for
+ */
+ public SingleTermQuery( String field, String value )
+ {
+ this( new QueryTerm( field, value ) );
+ }
+
+ /**
+ * @return the name of the field being queried
+ */
+ public String getField()
+ {
+ return term.getField();
+ }
+
+ /**
+ * @return the value being matched against the field
+ */
+ public String getValue()
+ {
+ return term.getValue();
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.digest.Digester;
+import org.apache.maven.archiva.digest.DigesterException;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+
+/**
+ * Base class for the index record factories: shared helpers for computing
+ * checksums and listing the contents of artifact archives.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public abstract class AbstractArtifactIndexRecordFactory
+ extends AbstractLogEnabled
+ implements RepositoryIndexRecordFactory
+{
+ /**
+ * Compute the checksum of a file with the given digester.
+ *
+ * @param file the file to checksum
+ * @param digester the digester to apply (e.g. MD5 or SHA-1)
+ * @return the lower-cased checksum, or null if the digest failed (the
+ * error is logged and the record is left without a checksum)
+ */
+ protected String readChecksum( File file, Digester digester )
+ {
+ String checksum;
+ try
+ {
+ checksum = digester.calc( file ).toLowerCase();
+ }
+ catch ( DigesterException e )
+ {
+ getLogger().error( "Error getting checksum for artifact file, leaving empty in index: " + e.getMessage() );
+ checksum = null;
+ }
+ return checksum;
+ }
+
+ /**
+ * List the entry names contained in a ZIP/JAR archive.
+ *
+ * @param file the archive file to read
+ * @return a List of String entry names
+ * @throws IOException if the archive cannot be opened or read
+ */
+ protected List readFilesInArchive( File file )
+ throws IOException
+ {
+ ZipFile zipFile = new ZipFile( file );
+ List files;
+ try
+ {
+ files = new ArrayList( zipFile.size() );
+
+ for ( Enumeration entries = zipFile.entries(); entries.hasMoreElements(); )
+ {
+ ZipEntry entry = (ZipEntry) entries.nextElement();
+
+ files.add( entry.getName() );
+ }
+ }
+ finally
+ {
+ // always release the file handle, even when reading fails
+ closeQuietly( zipFile );
+ }
+ return files;
+ }
+
+ /**
+ * Whether an archive entry name denotes an indexable class file.
+ * Entries containing '$' (inner/anonymous classes) are excluded.
+ *
+ * @param name the archive entry name
+ * @return true if the entry is a top-level class file
+ */
+ protected static boolean isClass( String name )
+ {
+ // TODO: verify if class is public or protected (this might require the original ZipEntry)
+ return name.endsWith( ".class" ) && name.lastIndexOf( "$" ) < 0;
+ }
+
+ /**
+ * Close a ZIP file, swallowing any error (for use in finally blocks).
+ *
+ * @param zipFile the file to close, may be null
+ */
+ protected static void closeQuietly( ZipFile zipFile )
+ {
+ try
+ {
+ if ( zipFile != null )
+ {
+ zipFile.close();
+ }
+ }
+ catch ( IOException e )
+ {
+ // ignored
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+import java.util.Date;
+import java.util.List;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A record with the fields in the minimal index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class MinimalArtifactIndexRecord
+ implements RepositoryIndexRecord
+{
+ /**
+ * The classes in the archive for the artifact, if it is a JAR.
+ */
+ private List classes;
+
+ /**
+ * The MD5 checksum of the artifact file.
+ */
+ private String md5Checksum;
+
+ /**
+ * The filename of the artifact file (no path).
+ * NOTE(review): the factories actually store the repository-relative path
+ * from pathOf() here, so the "(no path)" claim looks stale - confirm.
+ */
+ private String filename;
+
+ /**
+ * The timestamp that the artifact file was last modified. Granularity is seconds.
+ */
+ private long lastModified;
+
+ /**
+ * The size of the artifact file in bytes.
+ */
+ private long size;
+
+ // Milliseconds per second, used to truncate timestamps to whole seconds.
+ private static final int MS_PER_SEC = 1000;
+
+ public void setClasses( List classes )
+ {
+ this.classes = classes;
+ }
+
+ public void setMd5Checksum( String md5Checksum )
+ {
+ this.md5Checksum = md5Checksum;
+ }
+
+ public void setFilename( String filename )
+ {
+ this.filename = filename;
+ }
+
+ public void setLastModified( long lastModified )
+ {
+ // truncate to whole seconds, matching the granularity documented on the field
+ this.lastModified = lastModified - lastModified % MS_PER_SEC;
+ }
+
+ public void setSize( long size )
+ {
+ this.size = size;
+ }
+
+ public List getClasses()
+ {
+ return classes;
+ }
+
+ public String getMd5Checksum()
+ {
+ return md5Checksum;
+ }
+
+ public String getFilename()
+ {
+ return filename;
+ }
+
+ public long getLastModified()
+ {
+ return lastModified;
+ }
+
+ public long getSize()
+ {
+ return size;
+ }
+
+ /**
+ * Field-by-field comparison. NOTE(review): filename is dereferenced without
+ * a null check here and in hashCode(); both assume setFilename() has been
+ * called - confirm against the factories.
+ *
+ * @noinspection RedundantIfStatement
+ */
+ public boolean equals( Object obj )
+ {
+ if ( this == obj )
+ {
+ return true;
+ }
+ if ( obj == null || getClass() != obj.getClass() )
+ {
+ return false;
+ }
+
+ MinimalArtifactIndexRecord that = (MinimalArtifactIndexRecord) obj;
+
+ if ( lastModified != that.lastModified )
+ {
+ return false;
+ }
+ if ( size != that.size )
+ {
+ return false;
+ }
+ if ( classes != null ? !classes.equals( that.classes ) : that.classes != null )
+ {
+ return false;
+ }
+ if ( !filename.equals( that.filename ) )
+ {
+ return false;
+ }
+ if ( md5Checksum != null ? !md5Checksum.equals( that.md5Checksum ) : that.md5Checksum != null )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Combines the same fields that equals(Object) compares.
+ *
+ * @noinspection UnnecessaryParentheses
+ */
+ public int hashCode()
+ {
+ int result = classes != null ? classes.hashCode() : 0;
+ result = 31 * result + ( md5Checksum != null ? md5Checksum.hashCode() : 0 );
+ result = 31 * result + filename.hashCode();
+ result = 31 * result + (int) ( lastModified ^ ( lastModified >>> 32 ) );
+ result = 31 * result + (int) ( size ^ ( size >>> 32 ) );
+ return result;
+ }
+
+ public String toString()
+ {
+ return "Filename: " + filename + "; checksum: " + md5Checksum + "; size: " + size + "; lastModified: " +
+ new Date( lastModified ) + "; classes: " + classes;
+ }
+
+ /**
+ * The filename serves as the unique key of the record in the index.
+ *
+ * @return the filename
+ */
+ public String getPrimaryKey()
+ {
+ return filename;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.digest.Digester;
+import org.apache.maven.artifact.Artifact;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * An index record type for the minimal index.
+ *
+ * @author Edwin Punzalan
+ * @author Brett Porter
+ * @plexus.component role="org.apache.maven.archiva.indexing.record.RepositoryIndexRecordFactory" role-hint="minimal"
+ */
+public class MinimalArtifactIndexRecordFactory
+ extends AbstractArtifactIndexRecordFactory
+{
+ /* List of types to index. */
+ private static final Set INDEXED_TYPES = new HashSet( Arrays.asList( new String[]{"jar", "maven-plugin"} ) );
+
+ /* Length of the ".class" suffix stripped from class entry names. */
+ private static final int CLASS_SUFFIX_LENGTH = ".class".length();
+
+ /**
+ * @plexus.requirement role-hint="sha1"
+ */
+ protected Digester sha1Digester;
+
+ /**
+ * @plexus.requirement role-hint="md5"
+ */
+ protected Digester md5Digester;
+
+ /**
+ * Create a minimal index record for the given artifact.
+ *
+ * @param artifact the artifact to index; must have a file attached
+ * @return the populated record, or null when the artifact has no file,
+ * is not one of the indexed types, or its archive could not be read
+ */
+ public RepositoryIndexRecord createRecord( Artifact artifact )
+ {
+ MinimalArtifactIndexRecord record = null;
+
+ File file = artifact.getFile();
+ if ( file != null && INDEXED_TYPES.contains( artifact.getType() ) && file.exists() )
+ {
+ String md5 = readChecksum( file, md5Digester );
+
+ List files = null;
+ try
+ {
+ files = readFilesInArchive( file );
+ }
+ catch ( IOException e )
+ {
+ getLogger().error( "Error reading artifact file, omitting from index: " + e.getMessage() );
+ }
+
+ if ( files != null )
+ {
+ record = new MinimalArtifactIndexRecord();
+ record.setMd5Checksum( md5 );
+ record.setFilename( artifact.getRepository().pathOf( artifact ) );
+ record.setLastModified( file.lastModified() );
+ record.setSize( file.length() );
+ record.setClasses( getClassesFromFiles( files ) );
+ }
+ }
+ return record;
+ }
+
+ /**
+ * Convert the archive entry names to fully qualified class names.
+ *
+ * @param files the List of String archive entry names
+ * @return a List of String class names for the top-level class entries
+ */
+ private List getClassesFromFiles( List files )
+ {
+ List classes = new ArrayList();
+
+ for ( Iterator i = files.iterator(); i.hasNext(); )
+ {
+ String name = (String) i.next();
+
+ if ( isClass( name ) )
+ {
+ // convert e.g. "org/foo/Bar.class" to "org.foo.Bar"
+ classes.add( name.substring( 0, name.length() - CLASS_SUFFIX_LENGTH ).replace( '/', '.' ) );
+ }
+ }
+
+ return classes;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The fields in a minimal artifact index record.
+ *
+ * <p>The single-character values are the index field names - presumably kept
+ * short to minimise index size; confirm before renaming.</p>
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo should be an enum
+ */
+public class MinimalIndexRecordFields
+{
+ // Field name for the artifact filename.
+ public static final String FILENAME = "j";
+
+ // Field name for the last-modified timestamp.
+ public static final String LAST_MODIFIED = "d";
+
+ // Field name for the file size.
+ public static final String FILE_SIZE = "s";
+
+ // Field name for the MD5 checksum.
+ public static final String MD5 = "m";
+
+ // Field name for the class list.
+ public static final String CLASSES = "c";
+
+ private MinimalIndexRecordFields()
+ {
+ // No touchy!
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A repository index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryIndexRecord
+{
+ /**
+ * Get the primary key used to identify the record uniquely in the index.
+ *
+ * <p>The minimal record implementation returns the stored filename.</p>
+ *
+ * @return the primary key
+ */
+ String getPrimaryKey();
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * The layout of a record in a repository index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface RepositoryIndexRecordFactory
+{
+ /**
+ * The Plexus role.
+ */
+ String ROLE = RepositoryIndexRecordFactory.class.getName();
+
+ /**
+ * Create an index record from an artifact.
+ *
+ * <p>Implementations may return null when the artifact should not be
+ * indexed (e.g. no attached file, or an unreadable archive).</p>
+ *
+ * @param artifact the artifact
+ * @return the index record
+ * @throws RepositoryIndexException if there is a problem constructing the record (due to not being able to read the artifact file as a POM)
+ */
+ RepositoryIndexRecord createRecord( Artifact artifact )
+ throws RepositoryIndexException;
+
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+import java.util.List;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A record with the fields in the standard index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class StandardArtifactIndexRecord
+ extends MinimalArtifactIndexRecord
+{
+ /**
+ * The SHA-1 checksum of the artifact file.
+ */
+ private String sha1Checksum;
+
+ /**
+ * The artifact's group.
+ */
+ private String groupId;
+
+ /**
+ * The artifact's identifier within the group.
+ */
+ private String artifactId;
+
+ /**
+ * The artifact's version.
+ */
+ private String version;
+
+ /**
+ * The classifier, if there is one.
+ */
+ private String classifier;
+
+ /**
+ * The artifact type (from the file).
+ */
+ private String type;
+
+ /**
+ * A list of files (separated by '\n') in the artifact if it is an archive.
+ * NOTE(review): this is stored as a List here; the '\n'-separated form
+ * presumably applies only when serialised into the index - confirm.
+ */
+ private List files;
+
+ /**
+ * The identifier of the repository that the artifact came from.
+ */
+ private String repository;
+
+ /**
+ * The packaging specified in the POM for this artifact.
+ */
+ private String packaging;
+
+ /**
+ * The plugin prefix specified in the metadata if the artifact is a plugin.
+ */
+ private String pluginPrefix;
+
+ /**
+ * The year the project was started.
+ */
+ private String inceptionYear;
+
+ /**
+ * The description of the project.
+ */
+ private String projectDescription;
+
+ /**
+ * The name of the project.
+ */
+ private String projectName;
+
+ /**
+ * The base version (before the snapshot is determined).
+ */
+ private String baseVersion;
+
+ public void setSha1Checksum( String sha1Checksum )
+ {
+ this.sha1Checksum = sha1Checksum;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public void setArtifactId( String artifactId )
+ {
+ this.artifactId = artifactId;
+ }
+
+ public void setVersion( String version )
+ {
+ this.version = version;
+ }
+
+ public void setClassifier( String classifier )
+ {
+ this.classifier = classifier;
+ }
+
+ public void setType( String type )
+ {
+ this.type = type;
+ }
+
+ public void setFiles( List files )
+ {
+ this.files = files;
+ }
+
+ public void setRepository( String repository )
+ {
+ this.repository = repository;
+ }
+
+ /**
+ * Field-by-field comparison, including the superclass state via
+ * super.equals(). NOTE(review): groupId, artifactId, version and
+ * baseVersion are dereferenced without null checks here and in
+ * hashCode() - all assume the corresponding setters were called; confirm
+ * against the factories.
+ *
+ * @noinspection RedundantIfStatement
+ */
+ public boolean equals( Object obj )
+ {
+ if ( this == obj )
+ {
+ return true;
+ }
+ if ( obj == null || getClass() != obj.getClass() )
+ {
+ return false;
+ }
+ if ( !super.equals( obj ) )
+ {
+ return false;
+ }
+
+ StandardArtifactIndexRecord that = (StandardArtifactIndexRecord) obj;
+
+ if ( !artifactId.equals( that.artifactId ) )
+ {
+ return false;
+ }
+ if ( classifier != null ? !classifier.equals( that.classifier ) : that.classifier != null )
+ {
+ return false;
+ }
+ if ( files != null ? !files.equals( that.files ) : that.files != null )
+ {
+ return false;
+ }
+ if ( !groupId.equals( that.groupId ) )
+ {
+ return false;
+ }
+ if ( repository != null ? !repository.equals( that.repository ) : that.repository != null )
+ {
+ return false;
+ }
+ if ( sha1Checksum != null ? !sha1Checksum.equals( that.sha1Checksum ) : that.sha1Checksum != null )
+ {
+ return false;
+ }
+ if ( type != null ? !type.equals( that.type ) : that.type != null )
+ {
+ return false;
+ }
+ if ( !version.equals( that.version ) )
+ {
+ return false;
+ }
+ if ( !baseVersion.equals( that.baseVersion ) )
+ {
+ return false;
+ }
+ if ( packaging != null ? !packaging.equals( that.packaging ) : that.packaging != null )
+ {
+ return false;
+ }
+ if ( pluginPrefix != null ? !pluginPrefix.equals( that.pluginPrefix ) : that.pluginPrefix != null )
+ {
+ return false;
+ }
+ if ( projectName != null ? !projectName.equals( that.projectName ) : that.projectName != null )
+ {
+ return false;
+ }
+ if ( inceptionYear != null ? !inceptionYear.equals( that.inceptionYear ) : that.inceptionYear != null )
+ {
+ return false;
+ }
+ if ( projectDescription != null ? !projectDescription.equals( that.projectDescription )
+ : that.projectDescription != null )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Combines the same fields that equals(Object) compares, starting from the
+ * superclass hash.
+ */
+ public int hashCode()
+ {
+ int result = super.hashCode();
+ result = 31 * result + ( sha1Checksum != null ? sha1Checksum.hashCode() : 0 );
+ result = 31 * result + groupId.hashCode();
+ result = 31 * result + artifactId.hashCode();
+ result = 31 * result + version.hashCode();
+ result = 31 * result + baseVersion.hashCode();
+ result = 31 * result + ( classifier != null ? classifier.hashCode() : 0 );
+ result = 31 * result + ( type != null ? type.hashCode() : 0 );
+ result = 31 * result + ( files != null ? files.hashCode() : 0 );
+ result = 31 * result + ( repository != null ? repository.hashCode() : 0 );
+ result = 31 * result + ( packaging != null ? packaging.hashCode() : 0 );
+ result = 31 * result + ( pluginPrefix != null ? pluginPrefix.hashCode() : 0 );
+ result = 31 * result + ( inceptionYear != null ? inceptionYear.hashCode() : 0 );
+ result = 31 * result + ( projectName != null ? projectName.hashCode() : 0 );
+ result = 31 * result + ( projectDescription != null ? projectDescription.hashCode() : 0 );
+ return result;
+ }
+
+ public String getSha1Checksum()
+ {
+ return sha1Checksum;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public String getArtifactId()
+ {
+ return artifactId;
+ }
+
+ public String getVersion()
+ {
+ return version;
+ }
+
+ public String getClassifier()
+ {
+ return classifier;
+ }
+
+ public String getType()
+ {
+ return type;
+ }
+
+ public List getFiles()
+ {
+ return files;
+ }
+
+ public String getRepository()
+ {
+ return repository;
+ }
+
+ public String getPackaging()
+ {
+ return packaging;
+ }
+
+ public String getPluginPrefix()
+ {
+ return pluginPrefix;
+ }
+
+ public void setPackaging( String packaging )
+ {
+ this.packaging = packaging;
+ }
+
+ public void setPluginPrefix( String pluginPrefix )
+ {
+ this.pluginPrefix = pluginPrefix;
+ }
+
+ public void setInceptionYear( String inceptionYear )
+ {
+ this.inceptionYear = inceptionYear;
+ }
+
+ public void setProjectDescription( String description )
+ {
+ this.projectDescription = description;
+ }
+
+ public void setProjectName( String projectName )
+ {
+ this.projectName = projectName;
+ }
+
+ public String getInceptionYear()
+ {
+ return inceptionYear;
+ }
+
+ public String getProjectDescription()
+ {
+ return projectDescription;
+ }
+
+ public String getProjectName()
+ {
+ return projectName;
+ }
+
+ public void setBaseVersion( String baseVersion )
+ {
+ this.baseVersion = baseVersion;
+ }
+
+ public String getBaseVersion()
+ {
+ return baseVersion;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.digest.Digester;
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
+import org.codehaus.plexus.util.xml.Xpp3Dom;
+import org.codehaus.plexus.util.xml.Xpp3DomBuilder;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipException;
+import java.util.zip.ZipFile;
+
+/**
+ * An index record type for the standard index.
+ *
+ * @author Edwin Punzalan
+ * @author Brett Porter
+ * @plexus.component role="org.apache.maven.archiva.indexing.record.RepositoryIndexRecordFactory" role-hint="standard"
+ */
+public class StandardArtifactIndexRecordFactory
+ extends AbstractArtifactIndexRecordFactory
+{
+ /**
+ * A list of artifact types to treat as a zip archive.
+ *
+ * @todo this should be smarter (perhaps use plexus archiver to look for an unarchiver, and make the ones for zip configurable since sar, par, etc can be added at random.
+ */
+ private static final Set ARCHIVE_TYPES =
+ new HashSet( Arrays.asList( new String[]{"jar", "ejb", "par", "sar", "war", "ear", "rar"} ) );
+
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactFactory artifactFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private MavenProjectBuilder projectBuilder;
+
+ /**
+ * @plexus.requirement role-hint="sha1"
+ */
+ protected Digester sha1Digester;
+
+ /**
+ * @plexus.requirement role-hint="md5"
+ */
+ protected Digester md5Digester;
+
+ // Path of the plugin descriptor inside a plugin artifact.
+ private static final String PLUGIN_METADATA_NAME = "META-INF/maven/plugin.xml";
+
+ // Path of the archetype descriptor inside an archetype artifact.
+ private static final String ARCHETYPE_METADATA_NAME = "META-INF/maven/archetype.xml";
+
+ // some current/old archetypes have the archetype.xml at different location.
+ private static final String ARCHETYPE_METADATA_NAME_OLD = "META-INF/archetype.xml";
+
+ /**
+ * Create a standard index record for the given artifact: checksums,
+ * coordinates, archive entries and POM-derived metadata.
+ *
+ * @param artifact the artifact to index; must have a file attached
+ * @return the populated record, or null when the artifact has no file,
+ * its archive could not be read, or it is a POM whose packaging is
+ * not "pom" (such a POM does not stand on its own)
+ * @throws RepositoryIndexException if there is a problem constructing the record
+ */
+ public RepositoryIndexRecord createRecord( Artifact artifact )
+ throws RepositoryIndexException
+ {
+ StandardArtifactIndexRecord record = null;
+
+ File file = artifact.getFile();
+
+ // TODO: is this condition really a possibility?
+ if ( file != null && file.exists() )
+ {
+ String md5 = readChecksum( file, md5Digester );
+ String sha1 = readChecksum( file, sha1Digester );
+
+ List files = null;
+ boolean archive = ARCHIVE_TYPES.contains( artifact.getType() );
+ try
+ {
+ if ( archive )
+ {
+ files = readFilesInArchive( file );
+ }
+ }
+ catch ( IOException e )
+ {
+ getLogger().error( "Error reading artifact file, omitting from index: " + e.getMessage() );
+ }
+
+ // If it's an archive with no files, don't create a record
+ if ( !archive || files != null )
+ {
+ record = new StandardArtifactIndexRecord();
+
+ // basic coordinates and file attributes
+ record.setGroupId( artifact.getGroupId() );
+ record.setArtifactId( artifact.getArtifactId() );
+ record.setBaseVersion( artifact.getBaseVersion() );
+ record.setVersion( artifact.getVersion() );
+ record.setClassifier( artifact.getClassifier() );
+ record.setType( artifact.getType() );
+ record.setMd5Checksum( md5 );
+ record.setSha1Checksum( sha1 );
+ record.setFilename( artifact.getRepository().pathOf( artifact ) );
+ record.setLastModified( file.lastModified() );
+ record.setSize( file.length() );
+ record.setRepository( artifact.getRepository().getId() );
+ if ( files != null )
+ {
+ populateArchiveEntries( files, record, artifact.getFile() );
+ }
+
+ if ( !"pom".equals( artifact.getType() ) )
+ {
+ // non-POM artifact: look up its sibling POM for project metadata
+ Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
+ artifact.getArtifactId(),
+ artifact.getVersion() );
+ pomArtifact.isSnapshot(); // gross hack around bug in maven-artifact
+ File pomFile = new File( artifact.getRepository().getBasedir(),
+ artifact.getRepository().pathOf( pomArtifact ) );
+ if ( pomFile.exists() )
+ {
+ try
+ {
+ populatePomEntries( readPom( pomArtifact, artifact.getRepository() ), record );
+ }
+ catch ( ProjectBuildingException e )
+ {
+ getLogger().error( "Error reading POM file, not populating in index: " + e.getMessage() );
+ }
+ }
+ }
+ else
+ {
+ Model model;
+ try
+ {
+ model = readPom( artifact, artifact.getRepository() );
+
+ if ( !"pom".equals( model.getPackaging() ) )
+ {
+ // Don't return a record for a POM that is does not belong on its own
+ record = null;
+ }
+ else
+ {
+ populatePomEntries( model, record );
+ }
+ }
+ catch ( ProjectBuildingException e )
+ {
+ getLogger().error( "Error reading POM file, not populating in index: " + e.getMessage() );
+ }
+ }
+ }
+ }
+
+ return record;
+ }
+
+ private void populatePomEntries( Model pom, StandardArtifactIndexRecord record )
+ {
+ record.setPackaging( pom.getPackaging() );
+ record.setProjectName( pom.getName() );
+ record.setProjectDescription( pom.getDescription() );
+ record.setInceptionYear( pom.getInceptionYear() );
+
+/* TODO: fields for later
+ indexPlugins( doc, FLD_PLUGINS_BUILD, pom.getBuild().getPlugins().iterator() );
+ indexReportPlugins( doc, FLD_PLUGINS_REPORT, pom.getReporting().getPlugins().iterator() );
+ record.setDependencies( dependencies );
+ record.setLicenses( licenses );
+*/
+ }
+
+ private Model readPom( Artifact artifact, ArtifactRepository repository )
+ throws RepositoryIndexException, ProjectBuildingException
+ {
+ // TODO: this can create a -SNAPSHOT.pom when it didn't exist and a timestamped one did. This is harmless, but should be avoided
+ // TODO: will this pollute with local repo metadata?
+ MavenProject project = projectBuilder.buildFromRepository( artifact, Collections.EMPTY_LIST, repository );
+ return project.getModel();
+ }
+
+ private void populateArchiveEntries( List files, StandardArtifactIndexRecord record, File artifactFile )
+ throws RepositoryIndexException
+ {
+ List classes = new ArrayList();
+ List fileList = new ArrayList();
+
+ for ( Iterator i = files.iterator(); i.hasNext(); )
+ {
+ String name = (String) i.next();
+
+ // ignore directories
+ if ( !name.endsWith( "/" ) )
+ {
+ fileList.add( name );
+
+ if ( isClass( name ) )
+ {
+ classes.add( name.substring( 0, name.length() - 6 ).replace( '/', '.' ) );
+ }
+ else if ( PLUGIN_METADATA_NAME.equals( name ) )
+ {
+ populatePluginEntries( readXmlMetadataFileInJar( artifactFile, PLUGIN_METADATA_NAME ), record );
+ }
+ else if ( ARCHETYPE_METADATA_NAME.equals( name ) || ARCHETYPE_METADATA_NAME_OLD.equals( name ) )
+ {
+ populateArchetypeEntries( record );
+ }
+ }
+ }
+
+ if ( !classes.isEmpty() )
+ {
+ record.setClasses( classes );
+ }
+ if ( !fileList.isEmpty() )
+ {
+ record.setFiles( fileList );
+ }
+ }
+
+ private void populateArchetypeEntries( StandardArtifactIndexRecord record )
+ {
+ // Typically discovered as a JAR
+ record.setType( "maven-archetype" );
+ }
+
+ private Xpp3Dom readXmlMetadataFileInJar( File file, String name )
+ throws RepositoryIndexException
+ {
+ // TODO: would be more efficient with original ZipEntry still around
+
+ Xpp3Dom xpp3Dom;
+ ZipFile zipFile = null;
+ try
+ {
+ zipFile = new ZipFile( file );
+ ZipEntry entry = zipFile.getEntry( name );
+ xpp3Dom = Xpp3DomBuilder.build( new InputStreamReader( zipFile.getInputStream( entry ) ) );
+ }
+ catch ( ZipException e )
+ {
+ throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
+ }
+ finally
+ {
+ closeQuietly( zipFile );
+ }
+ return xpp3Dom;
+ }
+
+ public void populatePluginEntries( Xpp3Dom metadata, StandardArtifactIndexRecord record )
+ {
+ // Typically discovered as a JAR
+ record.setType( "maven-plugin" );
+
+ Xpp3Dom prefix = metadata.getChild( "goalPrefix" );
+
+ if ( prefix != null )
+ {
+ record.setPluginPrefix( prefix.getValue() );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * The field names used to store a standard artifact index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo should be an enum
+ */
+public class StandardIndexRecordFields
+{
+ public static final String FILENAME = "filename";
+
+ public static final String GROUPID = "groupId";
+
+ // "_u" suffix denotes the untokenized variant of a field, used for exact matching.
+ public static final String GROUPID_EXACT = GROUPID + "_u";
+
+ public static final String ARTIFACTID = "artifactId";
+
+ public static final String ARTIFACTID_EXACT = ARTIFACTID + "_u";
+
+ public static final String VERSION = "version";
+
+ public static final String VERSION_EXACT = VERSION + "_u";
+
+ public static final String BASE_VERSION = "baseVersion";
+
+ public static final String BASE_VERSION_EXACT = BASE_VERSION + "_u";
+
+ public static final String TYPE = "type";
+
+ public static final String CLASSIFIER = "classifier";
+
+ public static final String PACKAGING = "packaging";
+
+ public static final String REPOSITORY = "repo";
+
+ public static final String LAST_MODIFIED = "lastModified";
+
+ public static final String FILE_SIZE = "fileSize";
+
+ public static final String MD5 = "md5";
+
+ public static final String SHA1 = "sha1";
+
+ public static final String CLASSES = "classes";
+
+ public static final String PLUGIN_PREFIX = "pluginPrefix";
+
+ public static final String FILES = "files";
+
+ public static final String INCEPTION_YEAR = "inceptionYear";
+
+ public static final String PROJECT_NAME = "projectName";
+
+ public static final String PROJECT_DESCRIPTION = "projectDesc";
+
+ // Constants-only holder: private constructor prevents instantiation.
+ private StandardIndexRecordFields()
+ {
+ // No touchy!
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.indexing;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.indexing.query.Query;
-
-import java.util.Collection;
-import java.util.List;
-
-/**
- * Maintain an artifact index on the repository.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryArtifactIndex
-{
- /**
- * Indexes the artifacts found within the specified list of index records. If the artifacts are already in the
- * repository they are updated.
- *
- * @param records the artifacts to index
- * @throws RepositoryIndexException if there is a problem indexing the records
- */
- void indexRecords( Collection records )
- throws RepositoryIndexException;
-
- /**
- * Search the index based on the search criteria specified. Returns a list of index records.
- *
- * @param query The query that contains the search criteria
- * @return the index records found
- * @throws RepositoryIndexSearchException if there is a problem searching
- * @todo should it return "SearchResult" instances that contain the index record and other search data (like score?)
- */
- List search( Query query )
- throws RepositoryIndexSearchException;
-
- /**
- * Check if the index already exists.
- *
- * @return true if the index already exists
- * @throws RepositoryIndexException if the index location is not valid
- */
- boolean exists()
- throws RepositoryIndexException;
-
- /**
- * Delete records from the index. Simply ignore the request any did not exist.
- *
- * @param records the records to delete
- * @throws RepositoryIndexException if there is a problem removing the record
- */
- void deleteRecords( Collection records )
- throws RepositoryIndexException;
-}
+++ /dev/null
-package org.apache.maven.repository.indexing;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.File;
-
-/**
- * Obtain an index instance.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryArtifactIndexFactory
-{
- /**
- * Plexus role.
- */
- String ROLE = RepositoryArtifactIndexFactory.class.getName();
-
- /**
- * Method to create an instance of the standard index.
- *
- * @param indexPath the path where the index will be created/updated
- * @return the index instance
- */
- RepositoryArtifactIndex createStandardIndex( File indexPath );
-
- /**
- * Method to create an instance of the minimal index.
- *
- * @param indexPath the path where the index will be created/updated
- * @return the index instance
- */
- RepositoryArtifactIndex createMinimalIndex( File indexPath );
-}
+++ /dev/null
-package org.apache.maven.repository.indexing;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class RepositoryIndexException
- extends Exception
-{
- public RepositoryIndexException( String message, Throwable cause )
- {
- super( message, cause );
- }
-
- public RepositoryIndexException( String message )
- {
- super( message );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author Brett Porter
- */
-public class RepositoryIndexSearchException
- extends Exception
-{
- public RepositoryIndexSearchException( String message, Throwable cause )
- {
- super( message, cause );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.document.Document;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecord;
-
-import java.text.ParseException;
-
-/**
- * Converts repository records to Lucene documents.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface LuceneIndexRecordConverter
-{
- /**
- * Convert an index record to a Lucene document.
- *
- * @param record the record
- * @return the document
- */
- Document convert( RepositoryIndexRecord record );
-
- /**
- * Convert a Lucene document to an index record.
- *
- * @param document the document
- * @return the record
- * @throws java.text.ParseException if there is a problem parsing a field (specifically, dates)
- */
- RepositoryIndexRecord convert( Document document )
- throws ParseException;
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.document.DateTools;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.NumberTools;
-import org.apache.maven.repository.indexing.record.MinimalArtifactIndexRecord;
-import org.apache.maven.repository.indexing.record.MinimalIndexRecordFields;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecord;
-import org.codehaus.plexus.util.StringUtils;
-
-import java.text.ParseException;
-import java.util.Arrays;
-
-/**
- * Convert the minimal index record to a Lucene document.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneMinimalIndexRecordConverter
- implements LuceneIndexRecordConverter
-{
- public Document convert( RepositoryIndexRecord record )
- {
- MinimalArtifactIndexRecord rec = (MinimalArtifactIndexRecord) record;
-
- Document document = new Document();
- addTokenizedField( document, MinimalIndexRecordFields.FILENAME, rec.getFilename() );
- addUntokenizedField( document, MinimalIndexRecordFields.LAST_MODIFIED,
- DateTools.timeToString( rec.getLastModified(), DateTools.Resolution.SECOND ) );
- addUntokenizedField( document, MinimalIndexRecordFields.FILE_SIZE, NumberTools.longToString( rec.getSize() ) );
- addUntokenizedField( document, MinimalIndexRecordFields.MD5, rec.getMd5Checksum() );
- addTokenizedField( document, MinimalIndexRecordFields.CLASSES,
- StringUtils.join( rec.getClasses().iterator(), "\n" ) );
-
- return document;
- }
-
- public RepositoryIndexRecord convert( Document document )
- throws ParseException
- {
- MinimalArtifactIndexRecord record = new MinimalArtifactIndexRecord();
-
- record.setFilename( document.get( MinimalIndexRecordFields.FILENAME ) );
- record.setLastModified( DateTools.stringToTime( document.get( MinimalIndexRecordFields.LAST_MODIFIED ) ) );
- record.setSize( NumberTools.stringToLong( document.get( MinimalIndexRecordFields.FILE_SIZE ) ) );
- record.setMd5Checksum( document.get( MinimalIndexRecordFields.MD5 ) );
- record.setClasses( Arrays.asList( document.get( MinimalIndexRecordFields.CLASSES ).split( "\n" ) ) );
-
- return record;
- }
-
- private static void addUntokenizedField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
- }
- }
-
- private static void addTokenizedField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.YES, Field.Index.TOKENIZED ) );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.indexing.query.Query;
-
-/**
- * A holder for a lucene query to pass to the indexer API.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneQuery
- implements Query
-{
- private final org.apache.lucene.search.Query query;
-
- public LuceneQuery( org.apache.lucene.search.Query query )
- {
- this.query = query;
- }
-
- org.apache.lucene.search.Query getLuceneQuery()
- {
- return query;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Hits;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.apache.maven.repository.indexing.RepositoryIndexSearchException;
-import org.apache.maven.repository.indexing.query.Query;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecord;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Lucene implementation of a repository index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneRepositoryArtifactIndex
- implements RepositoryArtifactIndex
-{
- /**
- * The location of the index on the file system.
- */
- private File indexLocation;
-
- /**
- * Convert repository records to Lucene documents.
- */
- private LuceneIndexRecordConverter converter;
-
- private static final String FLD_PK = "pk";
-
- public LuceneRepositoryArtifactIndex( File indexPath, LuceneIndexRecordConverter converter )
- {
- this.indexLocation = indexPath;
- this.converter = converter;
- }
-
- public void indexRecords( Collection records )
- throws RepositoryIndexException
- {
- deleteRecords( records );
-
- addRecords( records );
- }
-
- private void addRecords( Collection records )
- throws RepositoryIndexException
- {
- IndexWriter indexWriter;
- try
- {
- indexWriter = new IndexWriter( indexLocation, getAnalyzer(), !exists() );
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Unable to open index", e );
- }
-
- try
- {
- for ( Iterator i = records.iterator(); i.hasNext(); )
- {
- RepositoryIndexRecord record = (RepositoryIndexRecord) i.next();
-
- if ( record != null )
- {
- Document document = converter.convert( record );
- document.add(
- new Field( FLD_PK, record.getPrimaryKey(), Field.Store.NO, Field.Index.UN_TOKENIZED ) );
-
- indexWriter.addDocument( document );
- }
- }
-
- indexWriter.optimize();
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Failed to add an index document", e );
- }
- finally
- {
- close( indexWriter );
- }
- }
-
- private void close( IndexWriter indexWriter )
- throws RepositoryIndexException
- {
- try
- {
- if ( indexWriter != null )
- {
- indexWriter.close();
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( e.getMessage(), e );
- }
- }
-
- private Analyzer getAnalyzer()
- {
- // TODO: investigate why changed in original! Probably for MD5 and number querying.
- return new StandardAnalyzer();
- }
-
- public void deleteRecords( Collection records )
- throws RepositoryIndexException
- {
- if ( exists() )
- {
- IndexReader indexReader = null;
- try
- {
- indexReader = IndexReader.open( indexLocation );
-
- for ( Iterator artifacts = records.iterator(); artifacts.hasNext(); )
- {
- RepositoryIndexRecord record = (RepositoryIndexRecord) artifacts.next();
-
- if ( record != null )
- {
- Term term = new Term( FLD_PK, record.getPrimaryKey() );
-
- indexReader.deleteDocuments( term );
- }
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Error deleting document: " + e.getMessage(), e );
- }
- finally
- {
- if ( indexReader != null )
- {
- closeQuietly( indexReader );
- }
- }
- }
- }
-
- public boolean exists()
- throws RepositoryIndexException
- {
- if ( IndexReader.indexExists( indexLocation ) )
- {
- return true;
- }
- else if ( !indexLocation.exists() )
- {
- return false;
- }
- else if ( indexLocation.isDirectory() )
- {
- if ( indexLocation.listFiles().length > 1 )
- {
- throw new RepositoryIndexException( indexLocation + " is not a valid index directory." );
- }
- else
- {
- return false;
- }
- }
- else
- {
- throw new RepositoryIndexException( indexLocation + " is not a directory." );
- }
- }
-
- public List search( Query query )
- throws RepositoryIndexSearchException
- {
- LuceneQuery lQuery = (LuceneQuery) query;
-
- org.apache.lucene.search.Query luceneQuery = lQuery.getLuceneQuery();
-
- IndexSearcher searcher;
- try
- {
- searcher = new IndexSearcher( indexLocation.getAbsolutePath() );
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexSearchException( "Unable to open index: " + e.getMessage(), e );
- }
-
- List records = new ArrayList();
- try
- {
- Hits hits = searcher.search( luceneQuery );
- for ( int i = 0; i < hits.length(); i++ )
- {
- Document doc = hits.doc( i );
-
- records.add( converter.convert( doc ) );
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
- }
- catch ( ParseException e )
- {
- throw new RepositoryIndexSearchException( "Unable to search index: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( searcher );
- }
-
- return records;
- }
-
- private static void closeQuietly( IndexSearcher searcher )
- {
- try
- {
- if ( searcher != null )
- {
- searcher.close();
- }
- }
- catch ( IOException e )
- {
- // ignore
- }
- }
-
- private static void closeQuietly( IndexReader reader )
- {
- try
- {
- if ( reader != null )
- {
- reader.close();
- }
- }
- catch ( IOException e )
- {
- // ignore
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-
-import java.io.File;
-
-/**
- * Factory for Lucene artifact index instances.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory" role-hint="lucene"
- */
-public class LuceneRepositoryArtifactIndexFactory
- implements RepositoryArtifactIndexFactory
-{
- public RepositoryArtifactIndex createStandardIndex( File indexPath )
- {
- return new LuceneRepositoryArtifactIndex( indexPath, new LuceneStandardIndexRecordConverter() );
- }
-
- public RepositoryArtifactIndex createMinimalIndex( File indexPath )
- {
- return new LuceneRepositoryArtifactIndex( indexPath, new LuceneMinimalIndexRecordConverter() );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.document.DateTools;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.NumberTools;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecord;
-import org.apache.maven.repository.indexing.record.StandardArtifactIndexRecord;
-import org.apache.maven.repository.indexing.record.StandardIndexRecordFields;
-import org.codehaus.plexus.util.StringUtils;
-
-import java.text.ParseException;
-import java.util.Arrays;
-
-/**
- * Convert the standard index record to a Lucene document.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneStandardIndexRecordConverter
- implements LuceneIndexRecordConverter
-{
- public Document convert( RepositoryIndexRecord record )
- {
- StandardArtifactIndexRecord rec = (StandardArtifactIndexRecord) record;
-
- Document document = new Document();
- addTokenizedField( document, StandardIndexRecordFields.FILENAME, rec.getFilename() );
- addTokenizedField( document, StandardIndexRecordFields.GROUPID, rec.getGroupId() );
- addExactField( document, StandardIndexRecordFields.GROUPID_EXACT, rec.getGroupId() );
- addTokenizedField( document, StandardIndexRecordFields.ARTIFACTID, rec.getArtifactId() );
- addExactField( document, StandardIndexRecordFields.ARTIFACTID_EXACT, rec.getArtifactId() );
- addTokenizedField( document, StandardIndexRecordFields.VERSION, rec.getVersion() );
- addExactField( document, StandardIndexRecordFields.VERSION_EXACT, rec.getVersion() );
- addTokenizedField( document, StandardIndexRecordFields.BASE_VERSION, rec.getBaseVersion() );
- addExactField( document, StandardIndexRecordFields.BASE_VERSION_EXACT, rec.getBaseVersion() );
- addUntokenizedField( document, StandardIndexRecordFields.TYPE, rec.getType() );
- addTokenizedField( document, StandardIndexRecordFields.CLASSIFIER, rec.getClassifier() );
- addUntokenizedField( document, StandardIndexRecordFields.PACKAGING, rec.getPackaging() );
- addUntokenizedField( document, StandardIndexRecordFields.REPOSITORY, rec.getRepository() );
- addUntokenizedField( document, StandardIndexRecordFields.LAST_MODIFIED,
- DateTools.timeToString( rec.getLastModified(), DateTools.Resolution.SECOND ) );
- addUntokenizedField( document, StandardIndexRecordFields.FILE_SIZE, NumberTools.longToString( rec.getSize() ) );
- addUntokenizedField( document, StandardIndexRecordFields.MD5, rec.getMd5Checksum() );
- addUntokenizedField( document, StandardIndexRecordFields.SHA1, rec.getSha1Checksum() );
- if ( rec.getClasses() != null )
- {
- addTokenizedField( document, StandardIndexRecordFields.CLASSES,
- StringUtils.join( rec.getClasses().iterator(), "\n" ) );
- }
- if ( rec.getFiles() != null )
- {
- addTokenizedField( document, StandardIndexRecordFields.FILES,
- StringUtils.join( rec.getFiles().iterator(), "\n" ) );
- }
- addUntokenizedField( document, StandardIndexRecordFields.PLUGIN_PREFIX, rec.getPluginPrefix() );
- addUntokenizedField( document, StandardIndexRecordFields.INCEPTION_YEAR, rec.getInceptionYear() );
- addTokenizedField( document, StandardIndexRecordFields.PROJECT_NAME, rec.getProjectName() );
- addTokenizedField( document, StandardIndexRecordFields.PROJECT_DESCRIPTION, rec.getProjectDescription() );
-/* TODO: add later
- document.add( Field.Keyword( StandardIndexRecordFields.FLD_LICENSE_URLS, "" ) );
- document.add( Field.Keyword( StandardIndexRecordFields.FLD_DEPENDENCIES, "" ) );
- document.add( Field.Keyword( StandardIndexRecordFields.FLD_PLUGINS_REPORT, "" ) );
- document.add( Field.Keyword( StandardIndexRecordFields.FLD_PLUGINS_BUILD, "" ) );
-*/
-
- return document;
- }
-
- public RepositoryIndexRecord convert( Document document )
- throws ParseException
- {
- StandardArtifactIndexRecord record = new StandardArtifactIndexRecord();
-
- record.setFilename( document.get( StandardIndexRecordFields.FILENAME ) );
- record.setGroupId( document.get( StandardIndexRecordFields.GROUPID ) );
- record.setArtifactId( document.get( StandardIndexRecordFields.ARTIFACTID ) );
- record.setVersion( document.get( StandardIndexRecordFields.VERSION ) );
- record.setBaseVersion( document.get( StandardIndexRecordFields.BASE_VERSION ) );
- record.setType( document.get( StandardIndexRecordFields.TYPE ) );
- record.setClassifier( document.get( StandardIndexRecordFields.CLASSIFIER ) );
- record.setPackaging( document.get( StandardIndexRecordFields.PACKAGING ) );
- record.setRepository( document.get( StandardIndexRecordFields.REPOSITORY ) );
- record.setLastModified( DateTools.stringToTime( document.get( StandardIndexRecordFields.LAST_MODIFIED ) ) );
- record.setSize( NumberTools.stringToLong( document.get( StandardIndexRecordFields.FILE_SIZE ) ) );
- record.setMd5Checksum( document.get( StandardIndexRecordFields.MD5 ) );
- record.setSha1Checksum( document.get( StandardIndexRecordFields.SHA1 ) );
- String classes = document.get( StandardIndexRecordFields.CLASSES );
- if ( classes != null )
- {
- record.setClasses( Arrays.asList( classes.split( "\n" ) ) );
- }
- String files = document.get( StandardIndexRecordFields.FILES );
- if ( files != null )
- {
- record.setFiles( Arrays.asList( files.split( "\n" ) ) );
- }
- record.setPluginPrefix( document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
- record.setInceptionYear( document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
- record.setProjectName( document.get( StandardIndexRecordFields.PROJECT_NAME ) );
- record.setProjectDescription( document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
-
- return record;
- }
-
- private static void addUntokenizedField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.YES, Field.Index.UN_TOKENIZED ) );
- }
- }
-
- private static void addExactField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.NO, Field.Index.UN_TOKENIZED ) );
- }
- }
-
- private static void addTokenizedField( Document document, String name, String value )
- {
- if ( value != null )
- {
- document.add( new Field( name, value, Field.Store.YES, Field.Index.TOKENIZED ) );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.query;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Class to hold multiple SinglePhraseQueries and/or other CompoundQueries.
- *
- * @author Edwin Punzalan
- */
-public class CompoundQuery
- implements Query
-{
- /**
- * The query terms.
- */
- private final List compoundQueryTerms = new ArrayList();
-
- /**
- * Appends a required term to this query.
- *
- * @param term the term to be appended to this query
- */
- public void and( QueryTerm term )
- {
- compoundQueryTerms.add( CompoundQueryTerm.and( new SingleTermQuery( term ) ) );
- }
-
- /**
- * Appends an optional term to this query.
- *
- * @param term the term to be appended to this query
- */
- public void or( QueryTerm term )
- {
- compoundQueryTerms.add( CompoundQueryTerm.or( new SingleTermQuery( term ) ) );
- }
-
- /**
- * Appends a prohibited term to this query.
- *
- * @param term the term to be appended to this query
- */
- public void not( QueryTerm term )
- {
- compoundQueryTerms.add( CompoundQueryTerm.not( new SingleTermQuery( term ) ) );
- }
-
- /**
- * Appends a required subquery to this query.
- *
- * @param query the subquery to be appended to this query
- */
- public void and( Query query )
- {
- compoundQueryTerms.add( CompoundQueryTerm.and( query ) );
- }
-
- /**
- * Appends an optional subquery to this query.
- *
- * @param query the subquery to be appended to this query
- */
- public void or( Query query )
- {
- compoundQueryTerms.add( CompoundQueryTerm.or( query ) );
- }
-
- /**
- * Appends a prohibited subquery to this query.
- *
- * @param query the subquery to be appended to this query
- */
- public void not( Query query )
- {
- compoundQueryTerms.add( CompoundQueryTerm.not( query ) );
- }
-
- /**
- * Method to get the List of Queries appended into this
- *
- * @return List of all Queries added to this Query
- */
- public List getCompoundQueryTerms()
- {
- return compoundQueryTerms;
- }
-
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.query;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Base of all query terms.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class CompoundQueryTerm
-{
- /**
- * The query to add to the compound query.
- */
- private final Query query;
-
- /**
- * Whether the term is required (an AND).
- */
- private final boolean required;
-
- /**
- * Whether the term is prohibited (a NOT).
- */
- private final boolean prohibited;
-
- /**
- * Class constructor
- *
- * @param query the subquery to add
- * @param required whether the term is required (an AND)
- * @param prohibited whether the term is prohibited (a NOT)
- */
- private CompoundQueryTerm( Query query, boolean required, boolean prohibited )
- {
- this.query = query;
- this.prohibited = prohibited;
- this.required = required;
- }
-
- /**
- * Method to test if the Query is a search requirement
- *
- * @return true if this Query is a search requirement, otherwise returns false
- */
- public boolean isRequired()
- {
- return required;
- }
-
- /**
- * Method to test if the Query is prohibited in the search result
- *
- * @return true if this Query is prohibited in the search result
- */
- public boolean isProhibited()
- {
- return prohibited;
- }
-
-
- /**
- * The subquery to execute.
- *
- * @return the query
- */
- public Query getQuery()
- {
- return query;
- }
-
- static CompoundQueryTerm and( Query query )
- {
- return new CompoundQueryTerm( query, true, false );
- }
-
- static CompoundQueryTerm or( Query query )
- {
- return new CompoundQueryTerm( query, false, false );
- }
-
- static CompoundQueryTerm not( Query query )
- {
- return new CompoundQueryTerm( query, false, true );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.query;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Interface to label the query classes
- *
- * @author Edwin Punzalan
- */
-public interface Query
-{
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.query;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Class to hold a single field search condition
- *
- * @author Edwin Punzalan
- */
-public class QueryTerm
-{
- private String field;
-
- private String value;
-
- /**
- * Class constructor
- *
- * @param field the index field to search
- * @param value the index value requirement
- */
- public QueryTerm( String field, String value )
- {
- this.field = field;
- this.value = value;
- }
-
- /**
- * Method to retrieve the name of the index field searched
- *
- * @return the name of the index field
- */
- public String getField()
- {
- return field;
- }
-
- /**
- * Method to retrieve the value used in searching the index field
- *
- * @return the value to corresspond the index field
- */
- public String getValue()
- {
- return value;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.query;\r
-\r
-/*\r
- * Copyright 2005-2006 The Apache Software Foundation.\r
- *\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- *\r
- * http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- */\r
-\r
-/**\r
- * Query object that handles range queries (presently used for dates).\r
- *\r
- * @author Maria Odea Ching\r
- * @author Brett Porter\r
- */\r
-public class RangeQuery\r
- implements Query\r
-{\r
- /**\r
- * Whether values equal to the boundaries are included in the query results.\r
- */\r
- private final boolean inclusive;\r
-\r
- /**\r
- * The lower bound.\r
- */\r
- private final QueryTerm begin;\r
-\r
- /**\r
- * The upper bound.\r
- */\r
- private final QueryTerm end;\r
-\r
- /**\r
- * Constructor.\r
- *\r
- * @param begin the lower bound\r
- * @param end the upper bound\r
- * @param inclusive whether to include the boundaries in the query\r
- */\r
- private RangeQuery( QueryTerm begin, QueryTerm end, boolean inclusive )\r
- {\r
- this.begin = begin;\r
- this.end = end;\r
- this.inclusive = inclusive;\r
- }\r
-\r
- /**\r
- * Create an open range, including all results.\r
- *\r
- * @return the query object\r
- */\r
- public static RangeQuery createOpenRange()\r
- {\r
- return new RangeQuery( null, null, false );\r
- }\r
-\r
- /**\r
- * Create a bounded range, excluding the endpoints.\r
- *\r
- * @param begin the lower bound value to compare to\r
- * @param end the upper bound value to compare to\r
- * @return the query object\r
- */\r
- public static RangeQuery createExclusiveRange( QueryTerm begin, QueryTerm end )\r
- {\r
- return new RangeQuery( begin, end, false );\r
- }\r
-\r
- /**\r
- * Create a bounded range, including the endpoints.\r
- *\r
- * @param begin the lower bound value to compare to\r
- * @param end the upper bound value to compare to\r
- * @return the query object\r
- */\r
- public static RangeQuery createInclusiveRange( QueryTerm begin, QueryTerm end )\r
- {\r
- return new RangeQuery( begin, end, true );\r
- }\r
-\r
- /**\r
- * Create a range that is greater than or equal to a given term.\r
- *\r
- * @param begin the value to compare to\r
- * @return the query object\r
- */\r
- public static RangeQuery createGreaterThanOrEqualToRange( QueryTerm begin )\r
- {\r
- return new RangeQuery( begin, null, true );\r
- }\r
-\r
- /**\r
- * Create a range that is greater than a given term.\r
- *\r
- * @param begin the value to compare to\r
- * @return the query object\r
- */\r
- public static RangeQuery createGreaterThanRange( QueryTerm begin )\r
- {\r
- return new RangeQuery( begin, null, false );\r
- }\r
-\r
- /**\r
- * Create a range that is less than or equal to a given term.\r
- *\r
- * @param end the value to compare to\r
- * @return the query object\r
- */\r
- public static RangeQuery createLessThanOrEqualToRange( QueryTerm end )\r
- {\r
- return new RangeQuery( null, end, true );\r
- }\r
-\r
- /**\r
- * Create a range that is less than a given term.\r
- *\r
- * @param end the value to compare to\r
- * @return the query object\r
- */\r
- public static RangeQuery createLessThanRange( QueryTerm end )\r
- {\r
- return new RangeQuery( null, end, false );\r
- }\r
-\r
- public QueryTerm getBegin()\r
- {\r
- return begin;\r
- }\r
-\r
- public QueryTerm getEnd()\r
- {\r
- return end;\r
- }\r
-\r
- public boolean isInclusive()\r
- {\r
- return inclusive;\r
- }\r
-\r
-}\r
+++ /dev/null
-package org.apache.maven.repository.indexing.query;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Query for a single term.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class SingleTermQuery
- implements Query
-{
- /**
- * The term to query for.
- */
- private final QueryTerm term;
-
- /**
- * Constructor.
- *
- * @param term the term to query
- */
- public SingleTermQuery( QueryTerm term )
- {
- this.term = term;
- }
-
- /**
- * Shorthand constructor - create a single term query from a field and value
- *
- * @param field the field name
- * @param value the value to check for
- */
- public SingleTermQuery( String field, String value )
- {
- this.term = new QueryTerm( field, value );
- }
-
- public String getField()
- {
- return term.getField();
- }
-
- public String getValue()
- {
- return term.getValue();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.digest.Digester;
-import org.apache.maven.repository.digest.DigesterException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.List;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
-
-/**
- * Base class for the index record factories.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractArtifactIndexRecordFactory
- extends AbstractLogEnabled
- implements RepositoryIndexRecordFactory
-{
- protected String readChecksum( File file, Digester digester )
- {
- String checksum;
- try
- {
- checksum = digester.calc( file ).toLowerCase();
- }
- catch ( DigesterException e )
- {
- getLogger().error( "Error getting checksum for artifact file, leaving empty in index: " + e.getMessage() );
- checksum = null;
- }
- return checksum;
- }
-
- protected List readFilesInArchive( File file )
- throws IOException
- {
- ZipFile zipFile = new ZipFile( file );
- List files;
- try
- {
- files = new ArrayList( zipFile.size() );
-
- for ( Enumeration entries = zipFile.entries(); entries.hasMoreElements(); )
- {
- ZipEntry entry = (ZipEntry) entries.nextElement();
-
- files.add( entry.getName() );
- }
- }
- finally
- {
- closeQuietly( zipFile );
- }
- return files;
- }
-
- protected static boolean isClass( String name )
- {
- // TODO: verify if class is public or protected (this might require the original ZipEntry)
- return name.endsWith( ".class" ) && name.lastIndexOf( "$" ) < 0;
- }
-
- protected static void closeQuietly( ZipFile zipFile )
- {
- try
- {
- if ( zipFile != null )
- {
- zipFile.close();
- }
- }
- catch ( IOException e )
- {
- // ignored
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-import java.util.Date;
-import java.util.List;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * The a record with the fields in the minimal index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class MinimalArtifactIndexRecord
- implements RepositoryIndexRecord
-{
- /**
- * The classes in the archive for the artifact, if it is a JAR.
- */
- private List classes;
-
- /**
- * The MD5 checksum of the artifact file.
- */
- private String md5Checksum;
-
- /**
- * The filename of the artifact file (no path).
- */
- private String filename;
-
- /**
- * The timestamp that the artifact file was last modified. Granularity is seconds.
- */
- private long lastModified;
-
- /**
- * The size of the artifact file in bytes.
- */
- private long size;
-
- private static final int MS_PER_SEC = 1000;
-
- public void setClasses( List classes )
- {
- this.classes = classes;
- }
-
- public void setMd5Checksum( String md5Checksum )
- {
- this.md5Checksum = md5Checksum;
- }
-
- public void setFilename( String filename )
- {
- this.filename = filename;
- }
-
- public void setLastModified( long lastModified )
- {
- this.lastModified = lastModified - lastModified % MS_PER_SEC;
- }
-
- public void setSize( long size )
- {
- this.size = size;
- }
-
- public List getClasses()
- {
- return classes;
- }
-
- public String getMd5Checksum()
- {
- return md5Checksum;
- }
-
- public String getFilename()
- {
- return filename;
- }
-
- public long getLastModified()
- {
- return lastModified;
- }
-
- public long getSize()
- {
- return size;
- }
-
- /**
- * @noinspection RedundantIfStatement
- */
- public boolean equals( Object obj )
- {
- if ( this == obj )
- {
- return true;
- }
- if ( obj == null || getClass() != obj.getClass() )
- {
- return false;
- }
-
- MinimalArtifactIndexRecord that = (MinimalArtifactIndexRecord) obj;
-
- if ( lastModified != that.lastModified )
- {
- return false;
- }
- if ( size != that.size )
- {
- return false;
- }
- if ( classes != null ? !classes.equals( that.classes ) : that.classes != null )
- {
- return false;
- }
- if ( !filename.equals( that.filename ) )
- {
- return false;
- }
- if ( md5Checksum != null ? !md5Checksum.equals( that.md5Checksum ) : that.md5Checksum != null )
- {
- return false;
- }
-
- return true;
- }
-
- /**
- * @noinspection UnnecessaryParentheses
- */
- public int hashCode()
- {
- int result = classes != null ? classes.hashCode() : 0;
- result = 31 * result + ( md5Checksum != null ? md5Checksum.hashCode() : 0 );
- result = 31 * result + filename.hashCode();
- result = 31 * result + (int) ( lastModified ^ ( lastModified >>> 32 ) );
- result = 31 * result + (int) ( size ^ ( size >>> 32 ) );
- return result;
- }
-
- public String toString()
- {
- return "Filename: " + filename + "; checksum: " + md5Checksum + "; size: " + size + "; lastModified: " +
- new Date( lastModified ) + "; classes: " + classes;
- }
-
- public String getPrimaryKey()
- {
- return filename;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.repository.digest.Digester;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-
-/**
- * An index record type for the minimal index.
- *
- * @author Edwin Punzalan
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.repository.indexing.record.RepositoryIndexRecordFactory" role-hint="minimal"
- */
-public class MinimalArtifactIndexRecordFactory
- extends AbstractArtifactIndexRecordFactory
-{
- /* List of types to index. */
- private static final Set INDEXED_TYPES = new HashSet( Arrays.asList( new String[]{"jar", "maven-plugin"} ) );
- /**
- * @plexus.requirement role-hint="sha1"
- */
- protected Digester sha1Digester;
- /**
- * @plexus.requirement role-hint="md5"
- */
- protected Digester md5Digester;
-
- public RepositoryIndexRecord createRecord( Artifact artifact )
- {
- MinimalArtifactIndexRecord record = null;
-
- File file = artifact.getFile();
- if ( file != null && INDEXED_TYPES.contains( artifact.getType() ) && file.exists() )
- {
- String md5 = readChecksum( file, md5Digester );
-
- List files = null;
- try
- {
- files = readFilesInArchive( file );
- }
- catch ( IOException e )
- {
- getLogger().error( "Error reading artifact file, omitting from index: " + e.getMessage() );
- }
-
- if ( files != null )
- {
- record = new MinimalArtifactIndexRecord();
- record.setMd5Checksum( md5 );
- record.setFilename( artifact.getRepository().pathOf( artifact ) );
- record.setLastModified( file.lastModified() );
- record.setSize( file.length() );
- record.setClasses( getClassesFromFiles( files ) );
- }
- }
- return record;
- }
-
- private List getClassesFromFiles( List files )
- {
- List classes = new ArrayList();
-
- for ( Iterator i = files.iterator(); i.hasNext(); )
- {
- String name = (String) i.next();
-
- if ( isClass( name ) )
- {
- classes.add( name.substring( 0, name.length() - 6 ).replace( '/', '.' ) );
- }
- }
-
- return classes;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * The fields in a minimal artifact index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo should be an enum
- */
-public class MinimalIndexRecordFields
-{
- public static final String FILENAME = "j";
-
- public static final String LAST_MODIFIED = "d";
-
- public static final String FILE_SIZE = "s";
-
- public static final String MD5 = "m";
-
- public static final String CLASSES = "c";
-
- private MinimalIndexRecordFields()
- {
- // No touchy!
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * A repository index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryIndexRecord
-{
- /**
- * Get the primary key used to identify the record uniquely in the index.
- *
- * @return the primary key
- */
- String getPrimaryKey();
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-
-/**
- * The layout of a record in a repository index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryIndexRecordFactory
-{
- /**
- * The Plexus role.
- */
- String ROLE = RepositoryIndexRecordFactory.class.getName();
-
- /**
- * Create an index record from an artifact.
- *
- * @param artifact the artifact
- * @return the index record
- * @throws RepositoryIndexException if there is a problem constructing the record (due to not being able to read the artifact file as a POM)
- */
- RepositoryIndexRecord createRecord( Artifact artifact )
- throws RepositoryIndexException;
-
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-import java.util.List;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * The a record with the fields in the standard index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class StandardArtifactIndexRecord
- extends MinimalArtifactIndexRecord
-{
- /**
- * The SHA-1 checksum of the artifact file.
- */
- private String sha1Checksum;
-
- /**
- * The artifact's group.
- */
- private String groupId;
-
- /**
- * The artifact's identifier within the group.
- */
- private String artifactId;
-
- /**
- * The artifact's version.
- */
- private String version;
-
- /**
- * The classifier, if there is one.
- */
- private String classifier;
-
- /**
- * The artifact type (from the file).
- */
- private String type;
-
- /**
- * A list of files (separated by '\n') in the artifact if it is an archive.
- */
- private List files;
-
- /**
- * The identifier of the repository that the artifact came from.
- */
- private String repository;
-
- /**
- * The packaging specified in the POM for this artifact.
- */
- private String packaging;
-
- /**
- * The plugin prefix specified in the metadata if the artifact is a plugin.
- */
- private String pluginPrefix;
-
- /**
- * The year the project was started.
- */
- private String inceptionYear;
-
- /**
- * The description of the project.
- */
- private String projectDescription;
-
- /**
- * The name of the project.
- */
- private String projectName;
-
- /**
- * The base version (before the snapshot is determined).
- */
- private String baseVersion;
-
- public void setSha1Checksum( String sha1Checksum )
- {
- this.sha1Checksum = sha1Checksum;
- }
-
- public void setGroupId( String groupId )
- {
- this.groupId = groupId;
- }
-
- public void setArtifactId( String artifactId )
- {
- this.artifactId = artifactId;
- }
-
- public void setVersion( String version )
- {
- this.version = version;
- }
-
- public void setClassifier( String classifier )
- {
- this.classifier = classifier;
- }
-
- public void setType( String type )
- {
- this.type = type;
- }
-
- public void setFiles( List files )
- {
- this.files = files;
- }
-
- public void setRepository( String repository )
- {
- this.repository = repository;
- }
-
- /**
- * @noinspection RedundantIfStatement
- */
- public boolean equals( Object obj )
- {
- if ( this == obj )
- {
- return true;
- }
- if ( obj == null || getClass() != obj.getClass() )
- {
- return false;
- }
- if ( !super.equals( obj ) )
- {
- return false;
- }
-
- StandardArtifactIndexRecord that = (StandardArtifactIndexRecord) obj;
-
- if ( !artifactId.equals( that.artifactId ) )
- {
- return false;
- }
- if ( classifier != null ? !classifier.equals( that.classifier ) : that.classifier != null )
- {
- return false;
- }
- if ( files != null ? !files.equals( that.files ) : that.files != null )
- {
- return false;
- }
- if ( !groupId.equals( that.groupId ) )
- {
- return false;
- }
- if ( repository != null ? !repository.equals( that.repository ) : that.repository != null )
- {
- return false;
- }
- if ( sha1Checksum != null ? !sha1Checksum.equals( that.sha1Checksum ) : that.sha1Checksum != null )
- {
- return false;
- }
- if ( type != null ? !type.equals( that.type ) : that.type != null )
- {
- return false;
- }
- if ( !version.equals( that.version ) )
- {
- return false;
- }
- if ( !baseVersion.equals( that.baseVersion ) )
- {
- return false;
- }
- if ( packaging != null ? !packaging.equals( that.packaging ) : that.packaging != null )
- {
- return false;
- }
- if ( pluginPrefix != null ? !pluginPrefix.equals( that.pluginPrefix ) : that.pluginPrefix != null )
- {
- return false;
- }
- if ( projectName != null ? !projectName.equals( that.projectName ) : that.projectName != null )
- {
- return false;
- }
- if ( inceptionYear != null ? !inceptionYear.equals( that.inceptionYear ) : that.inceptionYear != null )
- {
- return false;
- }
- if ( projectDescription != null ? !projectDescription.equals( that.projectDescription )
- : that.projectDescription != null )
- {
- return false;
- }
-
- return true;
- }
-
- public int hashCode()
- {
- int result = super.hashCode();
- result = 31 * result + ( sha1Checksum != null ? sha1Checksum.hashCode() : 0 );
- result = 31 * result + groupId.hashCode();
- result = 31 * result + artifactId.hashCode();
- result = 31 * result + version.hashCode();
- result = 31 * result + baseVersion.hashCode();
- result = 31 * result + ( classifier != null ? classifier.hashCode() : 0 );
- result = 31 * result + ( type != null ? type.hashCode() : 0 );
- result = 31 * result + ( files != null ? files.hashCode() : 0 );
- result = 31 * result + ( repository != null ? repository.hashCode() : 0 );
- result = 31 * result + ( packaging != null ? packaging.hashCode() : 0 );
- result = 31 * result + ( pluginPrefix != null ? pluginPrefix.hashCode() : 0 );
- result = 31 * result + ( inceptionYear != null ? inceptionYear.hashCode() : 0 );
- result = 31 * result + ( projectName != null ? projectName.hashCode() : 0 );
- result = 31 * result + ( projectDescription != null ? projectDescription.hashCode() : 0 );
- return result;
- }
-
- public String getSha1Checksum()
- {
- return sha1Checksum;
- }
-
- public String getGroupId()
- {
- return groupId;
- }
-
- public String getArtifactId()
- {
- return artifactId;
- }
-
- public String getVersion()
- {
- return version;
- }
-
- public String getClassifier()
- {
- return classifier;
- }
-
- public String getType()
- {
- return type;
- }
-
- public List getFiles()
- {
- return files;
- }
-
- public String getRepository()
- {
- return repository;
- }
-
- public String getPackaging()
- {
- return packaging;
- }
-
- public String getPluginPrefix()
- {
- return pluginPrefix;
- }
-
- public void setPackaging( String packaging )
- {
- this.packaging = packaging;
- }
-
- public void setPluginPrefix( String pluginPrefix )
- {
- this.pluginPrefix = pluginPrefix;
- }
-
- public void setInceptionYear( String inceptionYear )
- {
- this.inceptionYear = inceptionYear;
- }
-
- public void setProjectDescription( String description )
- {
- this.projectDescription = description;
- }
-
- public void setProjectName( String projectName )
- {
- this.projectName = projectName;
- }
-
- public String getInceptionYear()
- {
- return inceptionYear;
- }
-
- public String getProjectDescription()
- {
- return projectDescription;
- }
-
- public String getProjectName()
- {
- return projectName;
- }
-
- public void setBaseVersion( String baseVersion )
- {
- this.baseVersion = baseVersion;
- }
-
- public String getBaseVersion()
- {
- return baseVersion;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-import org.apache.maven.project.MavenProject;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.apache.maven.project.ProjectBuildingException;
-import org.apache.maven.repository.digest.Digester;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.codehaus.plexus.util.xml.Xpp3Dom;
-import org.codehaus.plexus.util.xml.Xpp3DomBuilder;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipException;
-import java.util.zip.ZipFile;
-
-/**
- * An index record type for the standard index.
- *
- * @author Edwin Punzalan
- * @author Brett Porter
- * @plexus.component role="org.apache.maven.repository.indexing.record.RepositoryIndexRecordFactory" role-hint="standard"
- */
-public class StandardArtifactIndexRecordFactory
- extends AbstractArtifactIndexRecordFactory
-{
- /**
- * A list of artifact types to treat as a zip archive.
- *
- * @todo this should be smarter (perhaps use plexus archiver to look for an unarchiver, and make the ones for zip configurable since sar, par, etc can be added at random.
- */
- private static final Set ARCHIVE_TYPES =
- new HashSet( Arrays.asList( new String[]{"jar", "ejb", "par", "sar", "war", "ear", "rar"} ) );
-
- /**
- * @plexus.requirement
- */
- private ArtifactFactory artifactFactory;
-
- /**
- * @plexus.requirement
- */
- private MavenProjectBuilder projectBuilder;
-
- /**
- * @plexus.requirement role-hint="sha1"
- */
- protected Digester sha1Digester;
-
- /**
- * @plexus.requirement role-hint="md5"
- */
- protected Digester md5Digester;
-
- private static final String PLUGIN_METADATA_NAME = "META-INF/maven/plugin.xml";
-
- private static final String ARCHETYPE_METADATA_NAME = "META-INF/maven/archetype.xml";
-
- // some current/old archetypes have the archetype.xml at different location.
- private static final String ARCHETYPE_METADATA_NAME_OLD = "META-INF/archetype.xml";
-
- public RepositoryIndexRecord createRecord( Artifact artifact )
- throws RepositoryIndexException
- {
- StandardArtifactIndexRecord record = null;
-
- File file = artifact.getFile();
-
- // TODO: is this condition really a possibility?
- if ( file != null && file.exists() )
- {
- String md5 = readChecksum( file, md5Digester );
- String sha1 = readChecksum( file, sha1Digester );
-
- List files = null;
- boolean archive = ARCHIVE_TYPES.contains( artifact.getType() );
- try
- {
- if ( archive )
- {
- files = readFilesInArchive( file );
- }
- }
- catch ( IOException e )
- {
- getLogger().error( "Error reading artifact file, omitting from index: " + e.getMessage() );
- }
-
- // If it's an archive with no files, don't create a record
- if ( !archive || files != null )
- {
- record = new StandardArtifactIndexRecord();
-
- record.setGroupId( artifact.getGroupId() );
- record.setArtifactId( artifact.getArtifactId() );
- record.setBaseVersion( artifact.getBaseVersion() );
- record.setVersion( artifact.getVersion() );
- record.setClassifier( artifact.getClassifier() );
- record.setType( artifact.getType() );
- record.setMd5Checksum( md5 );
- record.setSha1Checksum( sha1 );
- record.setFilename( artifact.getRepository().pathOf( artifact ) );
- record.setLastModified( file.lastModified() );
- record.setSize( file.length() );
- record.setRepository( artifact.getRepository().getId() );
- if ( files != null )
- {
- populateArchiveEntries( files, record, artifact.getFile() );
- }
-
- if ( !"pom".equals( artifact.getType() ) )
- {
- Artifact pomArtifact = artifactFactory.createProjectArtifact( artifact.getGroupId(),
- artifact.getArtifactId(),
- artifact.getVersion() );
- pomArtifact.isSnapshot(); // gross hack around bug in maven-artifact
- File pomFile = new File( artifact.getRepository().getBasedir(),
- artifact.getRepository().pathOf( pomArtifact ) );
- if ( pomFile.exists() )
- {
- try
- {
- populatePomEntries( readPom( pomArtifact, artifact.getRepository() ), record );
- }
- catch ( ProjectBuildingException e )
- {
- getLogger().error( "Error reading POM file, not populating in index: " + e.getMessage() );
- }
- }
- }
- else
- {
- Model model;
- try
- {
- model = readPom( artifact, artifact.getRepository() );
-
- if ( !"pom".equals( model.getPackaging() ) )
- {
- // Don't return a record for a POM that is does not belong on its own
- record = null;
- }
- else
- {
- populatePomEntries( model, record );
- }
- }
- catch ( ProjectBuildingException e )
- {
- getLogger().error( "Error reading POM file, not populating in index: " + e.getMessage() );
- }
- }
- }
- }
-
- return record;
- }
-
- private void populatePomEntries( Model pom, StandardArtifactIndexRecord record )
- {
- record.setPackaging( pom.getPackaging() );
- record.setProjectName( pom.getName() );
- record.setProjectDescription( pom.getDescription() );
- record.setInceptionYear( pom.getInceptionYear() );
-
-/* TODO: fields for later
- indexPlugins( doc, FLD_PLUGINS_BUILD, pom.getBuild().getPlugins().iterator() );
- indexReportPlugins( doc, FLD_PLUGINS_REPORT, pom.getReporting().getPlugins().iterator() );
- record.setDependencies( dependencies );
- record.setLicenses( licenses );
-*/
- }
-
- private Model readPom( Artifact artifact, ArtifactRepository repository )
- throws RepositoryIndexException, ProjectBuildingException
- {
- // TODO: this can create a -SNAPSHOT.pom when it didn't exist and a timestamped one did. This is harmless, but should be avoided
- // TODO: will this pollute with local repo metadata?
- MavenProject project = projectBuilder.buildFromRepository( artifact, Collections.EMPTY_LIST, repository );
- return project.getModel();
- }
-
- private void populateArchiveEntries( List files, StandardArtifactIndexRecord record, File artifactFile )
- throws RepositoryIndexException
- {
- List classes = new ArrayList();
- List fileList = new ArrayList();
-
- for ( Iterator i = files.iterator(); i.hasNext(); )
- {
- String name = (String) i.next();
-
- // ignore directories
- if ( !name.endsWith( "/" ) )
- {
- fileList.add( name );
-
- if ( isClass( name ) )
- {
- classes.add( name.substring( 0, name.length() - 6 ).replace( '/', '.' ) );
- }
- else if ( PLUGIN_METADATA_NAME.equals( name ) )
- {
- populatePluginEntries( readXmlMetadataFileInJar( artifactFile, PLUGIN_METADATA_NAME ), record );
- }
- else if ( ARCHETYPE_METADATA_NAME.equals( name ) || ARCHETYPE_METADATA_NAME_OLD.equals( name ) )
- {
- populateArchetypeEntries( record );
- }
- }
- }
-
- if ( !classes.isEmpty() )
- {
- record.setClasses( classes );
- }
- if ( !fileList.isEmpty() )
- {
- record.setFiles( fileList );
- }
- }
-
- private void populateArchetypeEntries( StandardArtifactIndexRecord record )
- {
- // Typically discovered as a JAR
- record.setType( "maven-archetype" );
- }
-
- private Xpp3Dom readXmlMetadataFileInJar( File file, String name )
- throws RepositoryIndexException
- {
- // TODO: would be more efficient with original ZipEntry still around
-
- Xpp3Dom xpp3Dom;
- ZipFile zipFile = null;
- try
- {
- zipFile = new ZipFile( file );
- ZipEntry entry = zipFile.getEntry( name );
- xpp3Dom = Xpp3DomBuilder.build( new InputStreamReader( zipFile.getInputStream( entry ) ) );
- }
- catch ( ZipException e )
- {
- throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
- }
- catch ( IOException e )
- {
- throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
- }
- catch ( XmlPullParserException e )
- {
- throw new RepositoryIndexException( "Unable to read plugin metadata: " + e.getMessage(), e );
- }
- finally
- {
- closeQuietly( zipFile );
- }
- return xpp3Dom;
- }
-
- public void populatePluginEntries( Xpp3Dom metadata, StandardArtifactIndexRecord record )
- {
- // Typically discovered as a JAR
- record.setType( "maven-plugin" );
-
- Xpp3Dom prefix = metadata.getChild( "goalPrefix" );
-
- if ( prefix != null )
- {
- record.setPluginPrefix( prefix.getValue() );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * The fields in a minimal artifact index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo should be an enum
- */
-public class StandardIndexRecordFields
-{
- public static final String FILENAME = "filename";
-
- public static final String GROUPID = "groupId";
-
- public static final String GROUPID_EXACT = GROUPID + "_u";
-
- public static final String ARTIFACTID = "artifactId";
-
- public static final String ARTIFACTID_EXACT = ARTIFACTID + "_u";
-
- public static final String VERSION = "version";
-
- public static final String VERSION_EXACT = VERSION + "_u";
-
- public static final String BASE_VERSION = "baseVersion";
-
- public static final String BASE_VERSION_EXACT = BASE_VERSION + "_u";
-
- public static final String TYPE = "type";
-
- public static final String CLASSIFIER = "classifier";
-
- public static final String PACKAGING = "packaging";
-
- public static final String REPOSITORY = "repo";
-
- public static final String LAST_MODIFIED = "lastModified";
-
- public static final String FILE_SIZE = "fileSize";
-
- public static final String MD5 = "md5";
-
- public static final String SHA1 = "sha1";
-
- public static final String CLASSES = "classes";
-
- public static final String PLUGIN_PREFIX = "pluginPrefix";
-
- public static final String FILES = "files";
-
- public static final String INCEPTION_YEAR = "inceptionYear";
-
- public static final String PROJECT_NAME = "projectName";
-
- public static final String PROJECT_DESCRIPTION = "projectDesc";
-
- private StandardIndexRecordFields()
- {
- // No touchy!
- }
-}
* Reduced Size Index
An additional index is maintained by the repository manager in the
- {{{../apidocs/org/apache/maven/repository/indexing/MinimalArtifactIndexRecord.html} MinimalIndex}} class. This
+ {{{../apidocs/org/apache/maven/archiva/indexing/MinimalArtifactIndexRecord.html} MinimalIndex}} class. This
indexes all of the same artifacts as the first index, but stores them with shorter field names and less information to
maintain a smaller size. This index is appropriate for use by certain clients such as IDE integration for fast
searching. For a fuller interface to the repository information, the integration should use the XMLRPC interface.
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexing.record.MinimalIndexRecordFields;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Test the Lucene implementation of the artifact index search.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo would be nice to abstract some of the query away, but for now passing in a Lucene query directly is good enough
+ */
+public class LuceneMinimalArtifactIndexSearchTest
+ extends PlexusTestCase
+{
+ private RepositoryArtifactIndex index;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private File indexLocation;
+
+ private RepositoryIndexRecordFactory recordFactory;
+
+ private Map records = new HashMap();
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ indexLocation = getTestFile( "target/test-index" );
+
+ FileUtils.deleteDirectory( indexLocation );
+
+ index = factory.createMinimalIndex( indexLocation );
+
+ records.put( "test-jar", recordFactory.createRecord( createArtifact( "test-jar" ) ) );
+ records.put( "test-jar-jdk14",
+ recordFactory.createRecord( createArtifact( "test-jar", "1.0", "jar", "jdk14" ) ) );
+ records.put( "test-jar-and-pom",
+ recordFactory.createRecord( createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" ) ) );
+ records.put( "test-jar-and-pom-jdk14", recordFactory.createRecord(
+ createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" ) ) );
+ records.put( "test-child-pom",
+ recordFactory.createRecord( createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" ) ) );
+ records.put( "test-archetype", recordFactory.createRecord( createArtifact( "test-archetype" ) ) );
+ records.put( "test-plugin", recordFactory.createRecord( createArtifact( "test-plugin" ) ) );
+ records.put( "test-pom", recordFactory.createRecord( createArtifact( "test-pom", "1.0", "pom" ) ) );
+ records.put( "parent-pom", recordFactory.createRecord( createArtifact( "parent-pom", "1", "pom" ) ) );
+ records.put( "test-dll", recordFactory.createRecord( createArtifact( "test-dll", "1.0.1.34", "dll" ) ) );
+
+ index.indexRecords( records.values() );
+ }
+
+ public void testExactMatchMd5()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( MinimalIndexRecordFields.MD5, "3a0adc365f849366cd8b633cad155cb7" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( MinimalIndexRecordFields.MD5, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchFilename()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( MinimalIndexRecordFields.FILENAME, "maven" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 7, results.size() );
+
+/* TODO: if this is a result we want, we need to change the analyzer. Currently, it is tokenizing it as plugin-1.0 and plugin/1.0 in the path
+ query = new TermQuery( new Term( MinimalIndexRecordFields.FILENAME, "plugin" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+*/
+ query = new TermQuery( new Term( MinimalIndexRecordFields.FILENAME, "test" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 7, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( MinimalIndexRecordFields.FILENAME, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchClass()
+ throws RepositoryIndexSearchException
+ {
+ // TODO: should be preserving case!
+ Query query = new TermQuery( new Term( MinimalIndexRecordFields.CLASSES, "b.c.c" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+/* TODO!: need to change the analyzer if we want partial classes (split on '.')
+ query = new TermQuery( new Term( MinimalIndexRecordFields.CLASSES, "C" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 4, results.size() );
+
+ query = new TermQuery( new Term( MinimalIndexRecordFields.CLASSES, "MyMojo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+*/
+
+ // test non-match fails
+ query = new TermQuery( new Term( MinimalIndexRecordFields.CLASSES, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar", null );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ return createArtifact( artifactId, version, type, null );
+ }
+
+ private Artifact createArtifact( String artifactId, String version, String type, String classifier )
+ {
+ Artifact artifact = artifactFactory.createDependencyArtifact( "org.apache.maven.archiva.record", artifactId,
+ VersionRange.createFromVersion( version ), type,
+ classifier, Artifact.SCOPE_RUNTIME );
+ artifact.isSnapshot();
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.NumberTools;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.archiva.indexing.record.MinimalIndexRecordFields;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecord;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+
+/**
+ * Test the Lucene implementation of the artifact index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneMinimalArtifactIndexTest
+ extends PlexusTestCase
+{
+ private RepositoryArtifactIndex index;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private File indexLocation;
+
+ private RepositoryIndexRecordFactory recordFactory;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ indexLocation = getTestFile( "target/test-index" );
+
+ FileUtils.deleteDirectory( indexLocation );
+
+ index = factory.createMinimalIndex( indexLocation );
+ }
+
+ public void testIndexExists()
+ throws IOException, RepositoryIndexException
+ {
+ assertFalse( "check index doesn't exist", index.exists() );
+
+ // create empty directory
+ indexLocation.mkdirs();
+ assertFalse( "check index doesn't exist even if directory does", index.exists() );
+
+ // create index, with no records
+ createEmptyIndex();
+ assertTrue( "check index is considered to exist", index.exists() );
+
+ // Test non-directory
+ FileUtils.deleteDirectory( indexLocation );
+ indexLocation.createNewFile();
+ try
+ {
+ index.exists();
+ fail( "Index operation should fail as the location is not valid" );
+ }
+ catch ( RepositoryIndexException e )
+ {
+ // great
+ }
+ finally
+ {
+ indexLocation.delete();
+ }
+ }
+
+ public void testAddRecordNoIndex()
+ throws IOException, RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertEquals( "Check document", repository.pathOf( artifact ),
+ document.get( MinimalIndexRecordFields.FILENAME ) );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddRecordExistingEmptyIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertRecord( document, artifact, "3a0adc365f849366cd8b633cad155cb7", "A\nb.B\nb.c.C" );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddRecordInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ // Do it again
+ record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertRecord( document, artifact, "3a0adc365f849366cd8b633cad155cb7", "A\nb.B\nb.c.C" );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ index.deleteRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordNotInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+
+ index.deleteRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordNoIndex()
+ throws IOException, RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.deleteRecords( Collections.singleton( record ) );
+
+ assertFalse( index.exists() );
+ }
+
+ /**
+ * A pom-packaged artifact yields no content for this (minimal) index:
+ * after indexing its record, the index must still contain zero documents.
+ */
+ public void testAddPomRecord()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ /**
+ * A plugin artifact is indexed like any jar: exactly one document with the
+ * jar's checksum and the classes found inside the archive.
+ * The MD5 below is the known checksum of the test-plugin fixture jar.
+ */
+ public void testAddPlugin()
+ throws IOException, RepositoryIndexException, XmlPullParserException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-plugin" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertRecord( document, artifact, "3530896791670ebb45e17708e5d52c40",
+ "org.apache.maven.archiva.record.MyMojo" );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ /**
+ * Creates a jar artifact, version 1.0, in the test group.
+ */
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar" );
+ }
+
+ /**
+ * Creates an artifact whose file points at the corresponding location
+ * inside the managed test repository.
+ */
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ Artifact artifact =
+ artifactFactory.createBuildArtifact( "org.apache.maven.archiva.record", artifactId, version, type );
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+
+ /**
+ * Creates an index at the test location containing no documents.
+ */
+ private void createEmptyIndex()
+ throws IOException
+ {
+ createIndex( Collections.EMPTY_LIST );
+ }
+
+ /**
+ * Writes the given Lucene documents to a freshly created index.
+ *
+ * @param documents the {@link Document} instances to add
+ */
+ private void createIndex( List documents )
+ throws IOException
+ {
+ IndexWriter writer = new IndexWriter( indexLocation, new StandardAnalyzer(), true );
+ for ( Iterator i = documents.iterator(); i.hasNext(); )
+ {
+ Document document = (Document) i.next();
+ writer.addDocument( document );
+ }
+ writer.optimize();
+ writer.close();
+ }
+
+ /**
+ * Asserts that the stored document matches the artifact's repository path,
+ * last-modified timestamp, checksum, file size and class list, as held in
+ * the minimal index record fields.
+ */
+ private void assertRecord( Document document, Artifact artifact, String expectedChecksum, String expectedClasses )
+ {
+ assertEquals( "Check document filename", repository.pathOf( artifact ),
+ document.get( MinimalIndexRecordFields.FILENAME ) );
+ assertEquals( "Check document timestamp", getLastModified( artifact.getFile() ),
+ document.get( MinimalIndexRecordFields.LAST_MODIFIED ) );
+ assertEquals( "Check document checksum", expectedChecksum, document.get( MinimalIndexRecordFields.MD5 ) );
+ assertEquals( "Check document size", artifact.getFile().length(),
+ NumberTools.stringToLong( document.get( MinimalIndexRecordFields.FILE_SIZE ) ) );
+ assertEquals( "Check document classes", expectedClasses, document.get( MinimalIndexRecordFields.CLASSES ) );
+ }
+
+ /**
+ * Formats a file's last-modified time as yyyyMMddHHmmss in UTC — the same
+ * representation compared against the LAST_MODIFIED field above.
+ */
+ private String getLastModified( File file )
+ {
+ SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
+ dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
+ return dateFormat.format( new Date( file.lastModified() ) );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecordFactory;
+import org.apache.maven.archiva.indexing.record.StandardIndexRecordFields;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Test the Lucene implementation of the artifact index search.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo would be nice to abstract some of the query away, but for now passing in a Lucene query directly is good enough
+ */
+public class LuceneStandardArtifactIndexSearchTest
+ extends PlexusTestCase
+{
+ // Index under test, populated once in setUp()
+ private RepositoryArtifactIndex index;
+
+ // Managed test repository the fixture artifacts live in
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private File indexLocation;
+
+ private RepositoryIndexRecordFactory recordFactory;
+
+ // Fixture records keyed by a short name; tests assert membership by key
+ private Map records = new HashMap();
+
+ /**
+ * Builds a fresh standard index over a fixed set of fixture artifacts.
+ * Every test queries this same fixture, so the expected result sizes in
+ * the tests are tied to the records registered here.
+ */
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ indexLocation = getTestFile( "target/test-index" );
+
+ // Start from a clean index directory for every test
+ FileUtils.deleteDirectory( indexLocation );
+
+ index = factory.createStandardIndex( indexLocation );
+
+ records.put( "test-jar", recordFactory.createRecord( createArtifact( "test-jar" ) ) );
+ records.put( "test-jar-jdk14",
+ recordFactory.createRecord( createArtifact( "test-jar", "1.0", "jar", "jdk14" ) ) );
+ records.put( "test-jar-and-pom",
+ recordFactory.createRecord( createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" ) ) );
+ records.put( "test-jar-and-pom-jdk14", recordFactory.createRecord(
+ createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" ) ) );
+ records.put( "test-child-pom",
+ recordFactory.createRecord( createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" ) ) );
+ records.put( "test-archetype", recordFactory.createRecord( createArtifact( "test-archetype" ) ) );
+ records.put( "test-plugin", recordFactory.createRecord( createArtifact( "test-plugin" ) ) );
+ records.put( "test-pom", recordFactory.createRecord( createArtifact( "test-pom", "1.0", "pom" ) ) );
+ records.put( "parent-pom", recordFactory.createRecord( createArtifact( "parent-pom", "1", "pom" ) ) );
+ records.put( "test-dll", recordFactory.createRecord( createArtifact( "test-dll", "1.0.1.34", "dll" ) ) );
+
+ index.indexRecords( records.values() );
+ }
+
+ /**
+ * The VERSION_EXACT field matches only the full, literal version string
+ * (including snapshot timestamps); partial versions and unknown versions
+ * return nothing.
+ */
+ public void testExactMatchVersion()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.VERSION_EXACT, "1.0" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.VERSION_EXACT, "1.0-SNAPSHOT" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.VERSION_EXACT, "1.0-20060728.121314-1" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.VERSION_EXACT, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * BASE_VERSION_EXACT is the inverse of the test above for snapshots: the
+ * base version 1.0-SNAPSHOT matches, the timestamped build number does not.
+ */
+ public void testExactMatchBaseVersion()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-SNAPSHOT" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-20060728.121314-1" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION_EXACT, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * GROUPID_EXACT matches only the whole group id — a prefix of the group
+ * must not match.
+ */
+ public void testExactMatchGroupId()
+ throws RepositoryIndexSearchException
+ {
+ Query query =
+ new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven.archiva.record" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+ // test partial match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * ARTIFACTID_EXACT matches only the whole artifact id; both classifier
+ * variants of test-jar share the same artifact id.
+ */
+ public void testExactMatchArtifactId()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, "test-jar" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ // test partial match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, "test" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * TYPE is the artifact type (jar, maven-plugin, dll, maven-archetype);
+ * note the snapshot child pom was created with type jar in setUp().
+ */
+ public void testExactMatchType()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "maven-plugin" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "jar" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "dll" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "maven-archetype" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * PACKAGING comes from the POM, so it can differ from TYPE: the archetype
+ * is packaged as a jar, the dll record has no packaging at all, and
+ * maven-archetype never appears as a packaging here.
+ */
+ public void testExactMatchPackaging()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "maven-plugin" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "jar" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 4, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "dll" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "maven-archetype" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Only the plugin fixture carries a plugin prefix ("test").
+ */
+ public void testExactMatchPluginPrefix()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.PLUGIN_PREFIX, "test" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.PLUGIN_PREFIX, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Every fixture record belongs to the repository with id "test"
+ * (see setUp), so all 10 records match.
+ */
+ public void testExactMatchRepository()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.REPOSITORY, "test" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.REPOSITORY, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Five fixture jars share identical content, hence the same MD5.
+ */
+ public void testExactMatchMd5()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.MD5, "3a0adc365f849366cd8b633cad155cb7" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.MD5, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Same five identical jars, matched by SHA-1 this time.
+ */
+ public void testExactMatchSha1()
+ throws RepositoryIndexSearchException
+ {
+ Query query =
+ new TermQuery( new Term( StandardIndexRecordFields.SHA1, "c66f18bf192cb613fc2febb4da541a34133eedc2" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.SHA1, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Inception year comes from the POM, so only the three pom-backed
+ * records carry it.
+ */
+ public void testExactMatchInceptionYear()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.INCEPTION_YEAR, "2005" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertEquals( "Check results size", 3, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.INCEPTION_YEAR, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * FILENAME is a tokenized field: "maven" and "test" match as path tokens.
+ * The commented-out case documents a current analyzer limitation.
+ */
+ public void testMatchFilename()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.FILENAME, "maven" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+/* TODO: if this is a result we want, we need to change the analyzer. Currently, it is tokenizing it as plugin-1.0 and plugin/1.0 in the path
+ query = new TermQuery( new Term( StandardIndexRecordFields.FILENAME, "plugin" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+*/
+ query = new TermQuery( new Term( StandardIndexRecordFields.FILENAME, "test" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ // parent-pom is the only fixture without "test" in its filename
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertEquals( "Check results size", 9, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.FILENAME, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * GROUPID (non-exact) matches the whole group id term; the analyzer does
+ * not currently split on '.', as the commented-out case notes.
+ */
+ public void testMatchGroupId()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID, "org.apache.maven.archiva.record" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+
+/* TODO: if we want this result, must change the analyzer to split on '.'
+ query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID, "maven" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertEquals( "Check results size", 10, results.size() );
+*/
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * ARTIFACTID (non-exact) is tokenized on '-', so "plugin" and "test"
+ * match as tokens while "maven" matches nothing.
+ */
+ public void testMatchArtifactId()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID, "plugin" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID, "test" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertEquals( "Check results size", 9, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID, "maven" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Non-exact VERSION matching; versions are not split on '.' or '-'
+ * by the current analyzer (see the commented-out cases).
+ */
+ public void testMatchVersion()
+ throws RepositoryIndexSearchException
+ {
+ // If partial matches are desired, need to change the analyzer for versions to split on '.'
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "1" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "1.0" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+/* TODO: need to change analyzer to split on - if we want this
+ query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "snapshot" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "alpha" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+*/
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Same as testMatchVersion but against the BASE_VERSION field.
+ */
+ public void testMatchBaseVersion()
+ throws RepositoryIndexSearchException
+ {
+ // If partial matches are desired, need to change the analyzer for versions to split on '.'
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "1" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "1.0" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+/* TODO: need to change analyzer to split on - if we want this
+ query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "snapshot" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "alpha" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+*/
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Classifier matching, including a boolean MUST_NOT query that excludes
+ * the two jdk14-classified records from the full result set (10 - 2 = 8).
+ */
+ public void testMatchClassifier()
+ throws RepositoryIndexSearchException
+ {
+ BooleanQuery bQuery = new BooleanQuery();
+ bQuery.add( new MatchAllDocsQuery(), BooleanClause.Occur.MUST );
+ bQuery.add( new TermQuery( new Term( StandardIndexRecordFields.CLASSIFIER, "jdk14" ) ),
+ BooleanClause.Occur.MUST_NOT );
+ List results = index.search( new LuceneQuery( bQuery ) );
+
+ assertFalse( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 8, results.size() );
+
+ // TODO: can we search for "anything with no classifier" ?
+
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.CLASSIFIER, "jdk14" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.CLASSIFIER, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ /**
+ * Class names are searched lowercased (hence "b.c.c" rather than "b.c.C")
+ * and are not split on '.' by the current analyzer.
+ */
+ public void testMatchClass()
+ throws RepositoryIndexSearchException
+ {
+ // TODO: should be preserving case!
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.CLASSES, "b.c.c" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 5, results.size() );
+
+/* TODO!: need to change the analyzer if we want partial classes (split on '.')
+ query = new TermQuery( new Term( StandardIndexRecordFields.CLASSES, "C" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
+ assertEquals( "Check results size", 4, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.CLASSES, "MyMojo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+*/
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.CLASSES, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ public void testMatchFiles()
+ throws RepositoryIndexSearchException
+ {
+ // TODO: should be preserving case!
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.FILES, "manifest.mf" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 7, results.size() );
+
+/*
+ // TODO: should be preserving case, and '-inf'!
+ query = new TermQuery( new Term( StandardIndexRecordFields.FILES, "meta-inf" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
+ assertEquals( "Check results size", 7, results.size() );
+*/
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.FILES, "plugin.xml" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.FILES, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ // Searches the PROJECT_NAME field for single terms and verifies the hit set.
+ public void testMatchProjectName()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_NAME, "mojo" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
+ assertEquals( "Check results size", 1, results.size() );
+
+ query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_NAME, "maven" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ // NOTE(review): asserting these two records are NOT in the results while also
+ // expecting exactly 2 hits looks inconsistent with testMatchProjectDescription,
+ // which uses assertTrue for the records that make up the expected count.
+ // Confirm whether assertFalse is intended here or should be assertTrue.
+ assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertFalse( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertEquals( "Check results size", 2, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_NAME, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ // Searches the PROJECT_DESCRIPTION field: the three POM-backed records share the
+ // word "description", and an unknown term must yield an empty result set.
+ public void testMatchProjectDescription()
+ throws RepositoryIndexSearchException
+ {
+ Query query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_DESCRIPTION, "description" ) );
+ List results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
+ assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
+ assertEquals( "Check results size", 3, results.size() );
+
+ // test non-match fails
+ query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_DESCRIPTION, "foo" ) );
+ results = index.search( new LuceneQuery( query ) );
+
+ assertTrue( "Check results size", results.isEmpty() );
+ }
+
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar", null );
+ }
+
+ /**
+ * Convenience overload: builds an artifact without a classifier.
+ */
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ String classifier = null;
+ return createArtifact( artifactId, version, type, classifier );
+ }
+
+ /**
+ * Builds a runtime-scoped dependency artifact rooted in the test managed repository,
+ * with its file resolved against the repository layout.
+ */
+ private Artifact createArtifact( String artifactId, String version, String type, String classifier )
+ {
+ Artifact artifact = artifactFactory.createDependencyArtifact( "org.apache.maven.archiva.record", artifactId,
+ VersionRange.createFromVersion( version ), type,
+ classifier, Artifact.SCOPE_RUNTIME );
+ // Return value intentionally ignored — presumably called for its side effect of
+ // resolving the snapshot baseVersion on the artifact. TODO confirm.
+ artifact.isSnapshot();
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.lucene;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.NumberTools;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecord;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecordFactory;
+import org.apache.maven.archiva.indexing.record.StandardIndexRecordFields;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+
+/**
+ * Test the Lucene implementation of the artifact index.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class LuceneStandardArtifactIndexTest
+ extends PlexusTestCase
+{
+ // Index under test; created fresh against indexLocation in setUp().
+ private RepositoryArtifactIndex index;
+
+ // Managed test repository the fixture artifacts are resolved from.
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ // On-disk Lucene index directory (target/test-index), wiped before each test.
+ private File indexLocation;
+
+ private RepositoryIndexRecordFactory recordFactory;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ indexLocation = getTestFile( "target/test-index" );
+
+ // Remove any index left over from a previous run so every test starts clean.
+ FileUtils.deleteDirectory( indexLocation );
+
+ index = factory.createStandardIndex( indexLocation );
+ }
+
+ // exists() must be false for a missing or empty directory, true once an index is
+ // written, and must fail with RepositoryIndexException when the location is a file.
+ public void testIndexExists()
+ throws IOException, RepositoryIndexException
+ {
+ assertFalse( "check index doesn't exist", index.exists() );
+
+ // create empty directory
+ indexLocation.mkdirs();
+ assertFalse( "check index doesn't exist even if directory does", index.exists() );
+
+ // create index, with no records
+ createEmptyIndex();
+ assertTrue( "check index is considered to exist", index.exists() );
+
+ // Test non-directory
+ FileUtils.deleteDirectory( indexLocation );
+ indexLocation.createNewFile();
+ try
+ {
+ index.exists();
+ fail( "Index operation should fail as the location is not valid" );
+ }
+ catch ( RepositoryIndexException e )
+ {
+ // great
+ }
+ finally
+ {
+ indexLocation.delete();
+ }
+ }
+
+ // Indexing into a location with no existing index should create it on demand.
+ public void testAddRecordNoIndex()
+ throws IOException, RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ // Read back through a raw IndexReader to assert on the stored document fields.
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertJarRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddRecordExistingEmptyIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertJarRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ // Re-indexing the same record must replace the existing document, not duplicate it
+ // (the index still holds exactly one document afterwards).
+ public void testAddRecordInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ // Do it again
+ record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertJarRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddPomRecord()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertPomRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testAddPlugin()
+ throws IOException, RepositoryIndexException, XmlPullParserException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-plugin" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+
+ index.indexRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ Document document = reader.document( 0 );
+ assertPluginRecord( artifact, document );
+ assertEquals( "Check index size", 1, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ public void testDeleteRecordInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.indexRecords( Collections.singletonList( record ) );
+
+ index.deleteRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ // Deleting a record that was never indexed must be a silent no-op.
+ public void testDeleteRecordNotInIndex()
+ throws IOException, RepositoryIndexException
+ {
+ createEmptyIndex();
+
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+
+ index.deleteRecords( Collections.singletonList( record ) );
+
+ IndexReader reader = IndexReader.open( indexLocation );
+ try
+ {
+ assertEquals( "No documents", 0, reader.numDocs() );
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+ // Deleting from a non-existent index must not create one as a side effect.
+ public void testDeleteRecordNoIndex()
+ throws IOException, RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = recordFactory.createRecord( artifact );
+ index.deleteRecords( Collections.singleton( record ) );
+
+ assertFalse( index.exists() );
+ }
+
+ // Convenience overload: version 1.0 jar.
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar" );
+ }
+
+ // Builds a build artifact rooted in the test repository with its file resolved
+ // against the repository layout.
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ Artifact artifact =
+ artifactFactory.createBuildArtifact( "org.apache.maven.archiva.record", artifactId, version, type );
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+
+ private void createEmptyIndex()
+ throws IOException
+ {
+ createIndex( Collections.EMPTY_LIST );
+ }
+
+ // Creates a brand-new index at indexLocation containing the supplied Lucene
+ // documents (parameter name "docments" is a typo preserved from the original).
+ private void createIndex( List docments )
+ throws IOException
+ {
+ IndexWriter writer = new IndexWriter( indexLocation, new StandardAnalyzer(), true );
+ for ( Iterator i = docments.iterator(); i.hasNext(); )
+ {
+ Document document = (Document) i.next();
+ writer.addDocument( document );
+ }
+ writer.optimize();
+ writer.close();
+ }
+
+ // Field assertions shared by the jar/pom/plugin record checks below.
+ private void assertRecord( Artifact artifact, Document document, String expectedArtifactId, String expectedType,
+ String expectedMd5, String expectedSha1 )
+ {
+ assertEquals( "Check document filename", repository.pathOf( artifact ),
+ document.get( StandardIndexRecordFields.FILENAME ) );
+ assertEquals( "Check document groupId", "org.apache.maven.archiva.record",
+ document.get( StandardIndexRecordFields.GROUPID ) );
+ assertEquals( "Check document artifactId", expectedArtifactId,
+ document.get( StandardIndexRecordFields.ARTIFACTID ) );
+ assertEquals( "Check document version", "1.0", document.get( StandardIndexRecordFields.VERSION ) );
+ assertEquals( "Check document type", expectedType, document.get( StandardIndexRecordFields.TYPE ) );
+ assertEquals( "Check document repository", "test", document.get( StandardIndexRecordFields.REPOSITORY ) );
+ assertEquals( "Check document timestamp", getLastModified( artifact.getFile() ),
+ document.get( StandardIndexRecordFields.LAST_MODIFIED ) );
+ assertEquals( "Check document md5", expectedMd5, document.get( StandardIndexRecordFields.MD5 ) );
+ assertEquals( "Check document sha1", expectedSha1, document.get( StandardIndexRecordFields.SHA1 ) );
+ assertEquals( "Check document file size", artifact.getFile().length(),
+ NumberTools.stringToLong( document.get( StandardIndexRecordFields.FILE_SIZE ) ) );
+ assertNull( "Check document classifier", document.get( StandardIndexRecordFields.CLASSIFIER ) );
+ }
+
+ private void assertPomRecord( Artifact artifact, Document document )
+ {
+ assertRecord( artifact, document, "test-pom", "pom", "103d11ac601a42ccf2a2ae54d308c362",
+ "4c4d237c5366df877c3a636d5b6241822d090355" );
+ assertNull( "Check document classes", document.get( StandardIndexRecordFields.CLASSES ) );
+ assertNull( "Check document files", document.get( StandardIndexRecordFields.FILES ) );
+ assertNull( "Check document pluginPrefix", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
+ assertEquals( "Check document year", "2005", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
+ assertEquals( "Check document project name", "Maven Repository Manager Test POM",
+ document.get( StandardIndexRecordFields.PROJECT_NAME ) );
+ assertEquals( "Check document project description", "Description",
+ document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
+ assertEquals( "Check document packaging", "pom", document.get( StandardIndexRecordFields.PACKAGING ) );
+ }
+
+ private void assertJarRecord( Artifact artifact, Document document )
+ {
+ assertRecord( artifact, document, "test-jar", "jar", "3a0adc365f849366cd8b633cad155cb7",
+ "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ assertEquals( "Check document classes", "A\nb.B\nb.c.C", document.get( StandardIndexRecordFields.CLASSES ) );
+ assertEquals( "Check document files", "META-INF/MANIFEST.MF\nA.class\nb/B.class\nb/c/C.class",
+ document.get( StandardIndexRecordFields.FILES ) );
+ assertNull( "Check document inceptionYear", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
+ assertNull( "Check document projectName", document.get( StandardIndexRecordFields.PROJECT_NAME ) );
+ assertNull( "Check document projectDesc", document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
+ assertNull( "Check document pluginPrefix", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
+ assertNull( "Check document packaging", document.get( StandardIndexRecordFields.PACKAGING ) );
+ }
+
+ private void assertPluginRecord( Artifact artifact, Document document )
+ {
+ assertRecord( artifact, document, "test-plugin", "maven-plugin", "3530896791670ebb45e17708e5d52c40",
+ "2cd2619d59a684e82e97471d2c2e004144c8f24e" );
+ assertEquals( "Check document classes", "org.apache.maven.archiva.record.MyMojo",
+ document.get( StandardIndexRecordFields.CLASSES ) );
+ assertEquals( "Check document files", "META-INF/MANIFEST.MF\n" +
+ "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.properties\n" +
+ "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.xml\n" + "META-INF/maven/plugin.xml\n" +
+ "org/apache/maven/archiva/record/MyMojo.class", document.get( StandardIndexRecordFields.FILES ) );
+ assertEquals( "Check document pluginPrefix", "test", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
+ assertEquals( "Check document packaging", "maven-plugin", document.get( StandardIndexRecordFields.PACKAGING ) );
+ assertNull( "Check document inceptionYear", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
+ assertEquals( "Check document project name", "Maven Mojo Archetype",
+ document.get( StandardIndexRecordFields.PROJECT_NAME ) );
+ assertNull( "Check document projectDesc", document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
+ }
+
+ // Formats the file's mtime as UTC yyyyMMddHHmmss to match how the index stores
+ // the LAST_MODIFIED field.
+ private String getLastModified( File file )
+ {
+ SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
+ dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
+ return dateFormat.format( new Date( file.lastModified() ) );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.query;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import junit.framework.TestCase;
+
+import java.util.Iterator;
+
+/**
+ * @author Brett Porter
+ */
+public class QueryTest
+ extends TestCase
+{
+ // Shared field/value terms reused across the test methods.
+ private QueryTerm term1 = new QueryTerm( "field1", "value1" );
+
+ private QueryTerm term2 = new QueryTerm( "field2", "value2" );
+
+ private QueryTerm term3 = new QueryTerm( "field3", "value3" );
+
+ // A QueryTerm must retain the field and value it was constructed with.
+ public void testQueryTerm()
+ {
+ QueryTerm query = new QueryTerm( "Field", "Value" );
+ assertEquals( "check field setting", "Field", query.getField() );
+ assertEquals( "check value setting", "Value", query.getValue() );
+ }
+
+ // Both SingleTermQuery constructors (field/value pair and QueryTerm) must expose
+ // the same field and value through the getters.
+ public void testSingleTermQuery()
+ {
+ SingleTermQuery query = new SingleTermQuery( "Field", "Value" );
+ assertEquals( "check field setting", "Field", query.getField() );
+ assertEquals( "check value setting", "Value", query.getValue() );
+
+ query = new SingleTermQuery( term1 );
+ assertEquals( "check field setting", "field1", query.getField() );
+ assertEquals( "check value setting", "value1", query.getValue() );
+ }
+
+ public void testRangeQueryOpen()
+ {
+ RangeQuery rangeQuery = RangeQuery.createOpenRange();
+ assertNull( "Check range has no start", rangeQuery.getBegin() );
+ assertNull( "Check range has no end", rangeQuery.getEnd() );
+ }
+
+ public void testRangeQueryExclusive()
+ {
+ RangeQuery rangeQuery = RangeQuery.createExclusiveRange( term1, term2 );
+ assertEquals( "Check range start", term1, rangeQuery.getBegin() );
+ assertEquals( "Check range end", term2, rangeQuery.getEnd() );
+ assertFalse( "Check exclusive", rangeQuery.isInclusive() );
+ }
+
+ public void testRangeQueryInclusive()
+ {
+ RangeQuery rangeQuery = RangeQuery.createInclusiveRange( term1, term2 );
+ assertEquals( "Check range start", term1, rangeQuery.getBegin() );
+ assertEquals( "Check range end", term2, rangeQuery.getEnd() );
+ assertTrue( "Check inclusive", rangeQuery.isInclusive() );
+ }
+
+ // The four half-open factories: greater-than(-or-equal) leave the end null,
+ // less-than(-or-equal) leave the start null; "...OrEqualTo" variants are inclusive.
+ public void testRangeQueryOpenEnded()
+ {
+ RangeQuery rangeQuery = RangeQuery.createGreaterThanOrEqualToRange( term1 );
+ assertEquals( "Check range start", term1, rangeQuery.getBegin() );
+ assertNull( "Check range end", rangeQuery.getEnd() );
+ assertTrue( "Check inclusive", rangeQuery.isInclusive() );
+
+ rangeQuery = RangeQuery.createGreaterThanRange( term1 );
+ assertEquals( "Check range start", term1, rangeQuery.getBegin() );
+ assertNull( "Check range end", rangeQuery.getEnd() );
+ assertFalse( "Check exclusive", rangeQuery.isInclusive() );
+
+ rangeQuery = RangeQuery.createLessThanOrEqualToRange( term1 );
+ assertNull( "Check range start", rangeQuery.getBegin() );
+ assertEquals( "Check range end", term1, rangeQuery.getEnd() );
+ assertTrue( "Check inclusive", rangeQuery.isInclusive() );
+
+ rangeQuery = RangeQuery.createLessThanRange( term1 );
+ assertNull( "Check range start", rangeQuery.getBegin() );
+ assertEquals( "Check range end", term1, rangeQuery.getEnd() );
+ assertFalse( "Check exclusive", rangeQuery.isInclusive() );
+ }
+
+ // NOTE(review): method name is missing an 'o' ("Compund"); JUnit 3 still discovers
+ // and runs it via the "test" prefix. Verifies and/or/not terms keep insertion order
+ // and carry the expected required/prohibited flags, both for plain terms and for a
+ // nested CompoundQuery.
+ public void testCompundQuery()
+ {
+ CompoundQuery query = new CompoundQuery();
+ assertTrue( "check query is empty", query.getCompoundQueryTerms().isEmpty() );
+
+ query.and( term1 );
+ query.or( term2 );
+ query.not( term3 );
+
+ Iterator i = query.getCompoundQueryTerms().iterator();
+ CompoundQueryTerm term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check first term", "field1", getQuery( term ).getField() );
+ assertEquals( "Check first term", "value1", getQuery( term ).getValue() );
+ assertTrue( "Check first term", term.isRequired() );
+ assertFalse( "Check first term", term.isProhibited() );
+
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check second term", "field2", getQuery( term ).getField() );
+ assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
+ assertFalse( "Check second term", term.isRequired() );
+ assertFalse( "Check second term", term.isProhibited() );
+
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check third term", "field3", getQuery( term ).getField() );
+ assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
+ assertFalse( "Check third term", term.isRequired() );
+ assertTrue( "Check third term", term.isProhibited() );
+
+ CompoundQuery query2 = new CompoundQuery();
+ query2.and( query );
+ query2.or( new SingleTermQuery( term2 ) );
+ query2.not( new SingleTermQuery( term3 ) );
+
+ i = query2.getCompoundQueryTerms().iterator();
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check first term", query, term.getQuery() );
+ assertTrue( "Check first term", term.isRequired() );
+ assertFalse( "Check first term", term.isProhibited() );
+
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check second term", "field2", getQuery( term ).getField() );
+ assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
+ assertFalse( "Check second term", term.isRequired() );
+ assertFalse( "Check second term", term.isProhibited() );
+
+ term = (CompoundQueryTerm) i.next();
+ assertEquals( "Check third term", "field3", getQuery( term ).getField() );
+ assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
+ assertFalse( "Check third term", term.isRequired() );
+ assertTrue( "Check third term", term.isProhibited() );
+ }
+
+ // Helper: unwrap the SingleTermQuery held by a compound term.
+ private static SingleTermQuery getQuery( CompoundQueryTerm term )
+ {
+ return (SingleTermQuery) term.getQuery();
+ }
+}
+
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Test the minimal artifact index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class MinimalArtifactIndexRecordFactoryTest
+ extends PlexusTestCase
+{
+ // Factory under test ("minimal" record flavour).
+ private RepositoryIndexRecordFactory factory;
+
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private static final String TEST_GROUP_ID = "org.apache.maven.archiva.record";
+
+ // Class names expected inside the shared test jar fixture.
+ private static final List JAR_CLASS_LIST = Arrays.asList( new String[]{"A", "b.B", "b.c.C"} );
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ factory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+ }
+
+ public void testIndexedJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ // Same MD5 is expected for several fixtures below — presumably the test jars are
+ // identical copies of one file. TODO confirm against src/test/managed-repository.
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarWithClassifier()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar", "1.0", "jar", "jdk14" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarAndPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ public void testIndexedJarAndPomWithClassifier()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // The minimal factory only records jar content: a standalone POM yields no record.
+ public void testIndexedPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+ }
+
+ public void testNonIndexedPom()
+ throws RepositoryIndexException
+ {
+ // If we pass in only the POM that belongs to a JAR, then expect null not the POM
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "pom" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-plugin", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-archetype", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+ }
+
+ public void testIndexedPlugin()
+ throws RepositoryIndexException, IOException, XmlPullParserException
+ {
+ Artifact artifact = createArtifact( "test-plugin" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3530896791670ebb45e17708e5d52c40" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( Collections.singletonList( "org.apache.maven.archiva.record.MyMojo" ) );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // Unreadable jar content must be skipped silently (null record), not thrown.
+ public void testCorruptJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-corrupt-jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ public void testNonJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-dll", "1.0.1.34", "dll" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ public void testMissingFile()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-foo" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ // Convenience overload: version 1.0 jar, no classifier.
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar" );
+ }
+
+ // Convenience overload: no classifier.
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ return createArtifact( artifactId, version, type, null );
+ }
+
+ // Builds a runtime-scoped dependency artifact resolved against the test repository.
+ private Artifact createArtifact( String artifactId, String version, String type, String classifier )
+ {
+ Artifact artifact = artifactFactory.createDependencyArtifact( TEST_GROUP_ID, artifactId,
+ VersionRange.createFromVersion( version ), type,
+ classifier, Artifact.SCOPE_RUNTIME );
+ // Return value ignored — presumably called for its side effect of resolving the
+ // snapshot baseVersion on the artifact. TODO confirm.
+ artifact.isSnapshot();
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.indexing.record;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Test the minimal artifact index record.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class StandardArtifactIndexRecordFactoryTest
+ extends PlexusTestCase
+{
+ // Factory under test: the "standard" (full) index record implementation.
+ private RepositoryIndexRecordFactory factory;
+
+ // Test managed repository that fixture artifacts are resolved against.
+ private ArtifactRepository repository;
+
+ private ArtifactFactory artifactFactory;
+
+ private static final String TEST_GROUP_ID = "org.apache.maven.archiva.record";
+
+ // Expected class and file listings for the shared fixture JARs; the checksums asserted
+ // below are likewise fixed properties of the checked-in test artifacts.
+ private static final List JAR_CLASS_LIST = Arrays.asList( new String[]{"A", "b.B", "b.c.C"} );
+
+ private static final List JAR_FILE_LIST =
+ Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "A.class", "b/B.class", "b/c/C.class"} );
+
+ // Looks up the factory and builds a repository over src/test/managed-repository.
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ factory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ ArtifactRepositoryFactory repositoryFactory =
+ (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ File file = getTestFile( "src/test/managed-repository" );
+ repository =
+ repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
+ }
+
+ // A plain JAR with no POM: record carries checksums, sizes and class/file lists only.
+ public void testIndexedJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-jar" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // Same as testIndexedJar but the classifier must be carried into the record.
+ public void testIndexedJarWithClassifier()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar", "1.0", "jar", "jdk14" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-jar" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setClassifier( "jdk14" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // A JAR with an accompanying POM: packaging and project name come from the POM.
+ public void testIndexedJarAndPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-jar-and-pom" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0-alpha-1" );
+ expectedRecord.setVersion( "1.0-alpha-1" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Test JAR and POM" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // Combines the POM-derived fields with a classifier on the same record.
+ public void testIndexedJarAndPomWithClassifier()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-jar-and-pom" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0-alpha-1" );
+ expectedRecord.setVersion( "1.0-alpha-1" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Test JAR and POM" );
+ expectedRecord.setClassifier( "jdk14" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // Snapshot JAR whose POM has a parent: expects base version "1.0-SNAPSHOT" distinct
+ // from the timestamped version, plus name/description/inceptionYear from the POM
+ // hierarchy (presumably inherited from the parent POM — verify against the fixtures).
+ public void testIndexedJarWithParentPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setClasses( JAR_CLASS_LIST );
+ expectedRecord.setArtifactId( "test-child-pom" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0-SNAPSHOT" );
+ expectedRecord.setVersion( "1.0-20060728.121314-1" );
+ expectedRecord.setFiles( JAR_FILE_LIST );
+ expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
+ expectedRecord.setType( "jar" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Child Project" );
+ expectedRecord.setProjectDescription( "Description" );
+ expectedRecord.setInceptionYear( "2005" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // A standalone POM artifact (packaging "pom") IS indexed, with no class/file lists.
+ public void testIndexedPom()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "103d11ac601a42ccf2a2ae54d308c362" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-pom" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setSha1Checksum( "4c4d237c5366df877c3a636d5b6241822d090355" );
+ expectedRecord.setType( "pom" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setPackaging( "pom" );
+ expectedRecord.setInceptionYear( "2005" );
+ expectedRecord.setProjectName( "Maven Repository Manager Test POM" );
+ expectedRecord.setProjectDescription( "Description" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // POMs that merely describe another packaging (jar/plugin/archetype) must NOT
+ // produce their own record — the main artifact carries the index entry.
+ public void testNonIndexedPom()
+ throws RepositoryIndexException
+ {
+ // If we pass in only the POM that belongs to a JAR, then expect null not the POM
+ Artifact artifact = createArtifact( "test-jar-and-pom", "1.0", "pom" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-plugin", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+
+ artifact = createArtifact( "test-archetype", "1.0", "pom" );
+
+ record = factory.createRecord( artifact );
+
+ assertNull( "Check no record", record );
+ }
+
+ // Maven plugin JAR: record additionally carries the plugin goal prefix ("test").
+ public void testIndexedPlugin()
+ throws RepositoryIndexException, IOException, XmlPullParserException
+ {
+ Artifact artifact = createArtifact( "test-plugin" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "3530896791670ebb45e17708e5d52c40" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-plugin" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setSha1Checksum( "2cd2619d59a684e82e97471d2c2e004144c8f24e" );
+ expectedRecord.setType( "maven-plugin" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setClasses( Arrays.asList( new String[]{"org.apache.maven.archiva.record.MyMojo"} ) );
+ expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF",
+ "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.properties",
+ "META-INF/maven/org.apache.maven.archiva.record/test-plugin/pom.xml", "META-INF/maven/plugin.xml",
+ "org/apache/maven/archiva/record/MyMojo.class"} ) );
+ expectedRecord.setPackaging( "maven-plugin" );
+ expectedRecord.setProjectName( "Maven Mojo Archetype" );
+ expectedRecord.setPluginPrefix( "test" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // Archetype JAR: detected via its archetype.xml, type becomes "maven-archetype"
+ // even though the POM packaging is "jar"; no classes are listed.
+ public void testIndexedArchetype()
+ throws RepositoryIndexException, IOException, XmlPullParserException
+ {
+ Artifact artifact = createArtifact( "test-archetype" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "52b7ea4b53818b8a5f4c329d88fd60d9" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-archetype" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0" );
+ expectedRecord.setVersion( "1.0" );
+ expectedRecord.setSha1Checksum( "05841f5e51c124f1729d86c1687438c36b9255d9" );
+ expectedRecord.setType( "maven-archetype" );
+ expectedRecord.setRepository( "test" );
+ expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "META-INF/maven/archetype.xml",
+ "META-INF/maven/org.apache.maven.archiva.record/test-archetype/pom.properties",
+ "META-INF/maven/org.apache.maven.archiva.record/test-archetype/pom.xml", "archetype-resources/pom.xml",
+ "archetype-resources/src/main/java/App.java", "archetype-resources/src/test/java/AppTest.java"} ) );
+ expectedRecord.setPackaging( "jar" );
+ expectedRecord.setProjectName( "Archetype - test-archetype" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // A JAR that cannot be read yields no record rather than an exception.
+ public void testCorruptJar()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-corrupt-jar" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ // Non-JAR artifact type: indexed with checksums only (the fixture file is empty,
+ // hence the well-known MD5/SHA-1 digests of zero bytes).
+ public void testDll()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-dll", "1.0.1.34", "dll" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
+ expectedRecord.setMd5Checksum( "d41d8cd98f00b204e9800998ecf8427e" );
+ expectedRecord.setFilename( repository.pathOf( artifact ) );
+ expectedRecord.setLastModified( artifact.getFile().lastModified() );
+ expectedRecord.setSize( artifact.getFile().length() );
+ expectedRecord.setArtifactId( "test-dll" );
+ expectedRecord.setGroupId( TEST_GROUP_ID );
+ expectedRecord.setBaseVersion( "1.0.1.34" );
+ expectedRecord.setVersion( "1.0.1.34" );
+ expectedRecord.setSha1Checksum( "da39a3ee5e6b4b0d3255bfef95601890afd80709" );
+ expectedRecord.setType( "dll" );
+ expectedRecord.setRepository( "test" );
+
+ assertEquals( "check record", expectedRecord, record );
+ }
+
+ // Artifact with no file on disk yields no record.
+ public void testMissingFile()
+ throws RepositoryIndexException
+ {
+ Artifact artifact = createArtifact( "test-foo" );
+
+ RepositoryIndexRecord record = factory.createRecord( artifact );
+
+ assertNull( "Confirm no record is returned", record );
+ }
+
+ // Convenience overload: builds a version "1.0" JAR artifact for the given artifactId.
+ private Artifact createArtifact( String artifactId )
+ {
+ return createArtifact( artifactId, "1.0", "jar" );
+ }
+
+ // Convenience overload: builds an artifact with no classifier.
+ private Artifact createArtifact( String artifactId, String version, String type )
+ {
+ return createArtifact( artifactId, version, type, null );
+ }
+
+ // Creates a runtime-scoped dependency artifact in TEST_GROUP_ID and wires it to the
+ // test managed repository (both its resolved file path and its source repository).
+ private Artifact createArtifact( String artifactId, String version, String type, String classifier )
+ {
+ Artifact artifact = artifactFactory.createDependencyArtifact( TEST_GROUP_ID, artifactId,
+ VersionRange.createFromVersion( version ), type,
+ classifier, Artifact.SCOPE_RUNTIME );
+ // NOTE(review): return value deliberately ignored — looks like isSnapshot() is called
+ // for its side effect of initialising the artifact's base version; confirm against the
+ // Maven Artifact implementation in use.
+ artifact.isSnapshot();
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ artifact.setRepository( repository );
+ return artifact;
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.RepositoryIndexSearchException;
-import org.apache.maven.repository.indexing.record.MinimalIndexRecordFields;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecordFactory;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the Lucene implementation of the artifact index search.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be nice to abstract some of the query away, but for now passing in a Lucene query directly is good enough
- */
-public class LuceneMinimalArtifactIndexSearchTest
- extends PlexusTestCase
-{
- private RepositoryArtifactIndex index;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private File indexLocation;
-
- private RepositoryIndexRecordFactory recordFactory;
-
- private Map records = new HashMap();
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- indexLocation = getTestFile( "target/test-index" );
-
- FileUtils.deleteDirectory( indexLocation );
-
- index = factory.createMinimalIndex( indexLocation );
-
- records.put( "test-jar", recordFactory.createRecord( createArtifact( "test-jar" ) ) );
- records.put( "test-jar-jdk14",
- recordFactory.createRecord( createArtifact( "test-jar", "1.0", "jar", "jdk14" ) ) );
- records.put( "test-jar-and-pom",
- recordFactory.createRecord( createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" ) ) );
- records.put( "test-jar-and-pom-jdk14", recordFactory.createRecord(
- createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" ) ) );
- records.put( "test-child-pom",
- recordFactory.createRecord( createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" ) ) );
- records.put( "test-archetype", recordFactory.createRecord( createArtifact( "test-archetype" ) ) );
- records.put( "test-plugin", recordFactory.createRecord( createArtifact( "test-plugin" ) ) );
- records.put( "test-pom", recordFactory.createRecord( createArtifact( "test-pom", "1.0", "pom" ) ) );
- records.put( "parent-pom", recordFactory.createRecord( createArtifact( "parent-pom", "1", "pom" ) ) );
- records.put( "test-dll", recordFactory.createRecord( createArtifact( "test-dll", "1.0.1.34", "dll" ) ) );
-
- index.indexRecords( records.values() );
- }
-
- public void testExactMatchMd5()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( MinimalIndexRecordFields.MD5, "3a0adc365f849366cd8b633cad155cb7" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( MinimalIndexRecordFields.MD5, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchFilename()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( MinimalIndexRecordFields.FILENAME, "maven" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 7, results.size() );
-
-/* TODO: if this is a result we want, we need to change the analyzer. Currently, it is tokenizing it as plugin-1.0 and plugin/1.0 in the path
- query = new TermQuery( new Term( MinimalIndexRecordFields.FILENAME, "plugin" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-*/
- query = new TermQuery( new Term( MinimalIndexRecordFields.FILENAME, "test" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 7, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( MinimalIndexRecordFields.FILENAME, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchClass()
- throws RepositoryIndexSearchException
- {
- // TODO: should be preserving case!
- Query query = new TermQuery( new Term( MinimalIndexRecordFields.CLASSES, "b.c.c" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
-/* TODO!: need to change the analyzer if we want partial classes (split on '.')
- query = new TermQuery( new Term( MinimalIndexRecordFields.CLASSES, "C" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 4, results.size() );
-
- query = new TermQuery( new Term( MinimalIndexRecordFields.CLASSES, "MyMojo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-*/
-
- // test non-match fails
- query = new TermQuery( new Term( MinimalIndexRecordFields.CLASSES, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar", null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- return createArtifact( artifactId, version, type, null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type, String classifier )
- {
- Artifact artifact = artifactFactory.createDependencyArtifact( "org.apache.maven.repository.record", artifactId,
- VersionRange.createFromVersion( version ), type,
- classifier, Artifact.SCOPE_RUNTIME );
- artifact.isSnapshot();
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.NumberTools;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.apache.maven.repository.indexing.record.MinimalIndexRecordFields;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecord;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecordFactory;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.FileUtils;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.TimeZone;
-
-/**
- * Test the Lucene implementation of the artifact index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneMinimalArtifactIndexTest
- extends PlexusTestCase
-{
- private RepositoryArtifactIndex index;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private File indexLocation;
-
- private RepositoryIndexRecordFactory recordFactory;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- indexLocation = getTestFile( "target/test-index" );
-
- FileUtils.deleteDirectory( indexLocation );
-
- index = factory.createMinimalIndex( indexLocation );
- }
-
- public void testIndexExists()
- throws IOException, RepositoryIndexException
- {
- assertFalse( "check index doesn't exist", index.exists() );
-
- // create empty directory
- indexLocation.mkdirs();
- assertFalse( "check index doesn't exist even if directory does", index.exists() );
-
- // create index, with no records
- createEmptyIndex();
- assertTrue( "check index is considered to exist", index.exists() );
-
- // Test non-directory
- FileUtils.deleteDirectory( indexLocation );
- indexLocation.createNewFile();
- try
- {
- index.exists();
- fail( "Index operation should fail as the location is not valid" );
- }
- catch ( RepositoryIndexException e )
- {
- // great
- }
- finally
- {
- indexLocation.delete();
- }
- }
-
- public void testAddRecordNoIndex()
- throws IOException, RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertEquals( "Check document", repository.pathOf( artifact ),
- document.get( MinimalIndexRecordFields.FILENAME ) );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddRecordExistingEmptyIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertRecord( document, artifact, "3a0adc365f849366cd8b633cad155cb7", "A\nb.B\nb.c.C" );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddRecordInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- // Do it again
- record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertRecord( document, artifact, "3a0adc365f849366cd8b633cad155cb7", "A\nb.B\nb.c.C" );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- index.deleteRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordNotInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
-
- index.deleteRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordNoIndex()
- throws IOException, RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.deleteRecords( Collections.singleton( record ) );
-
- assertFalse( index.exists() );
- }
-
- public void testAddPomRecord()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddPlugin()
- throws IOException, RepositoryIndexException, XmlPullParserException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-plugin" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
-
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertRecord( document, artifact, "06f6fe25e46c4d4fb5be4f56a9bab0ee",
- "org.apache.maven.repository.record.MyMojo" );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar" );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- Artifact artifact =
- artifactFactory.createBuildArtifact( "org.apache.maven.repository.record", artifactId, version, type );
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-
- private void createEmptyIndex()
- throws IOException
- {
- createIndex( Collections.EMPTY_LIST );
- }
-
- private void createIndex( List docments )
- throws IOException
- {
- IndexWriter writer = new IndexWriter( indexLocation, new StandardAnalyzer(), true );
- for ( Iterator i = docments.iterator(); i.hasNext(); )
- {
- Document document = (Document) i.next();
- writer.addDocument( document );
- }
- writer.optimize();
- writer.close();
- }
-
- private void assertRecord( Document document, Artifact artifact, String expectedChecksum, String expectedClasses )
- {
- assertEquals( "Check document filename", repository.pathOf( artifact ),
- document.get( MinimalIndexRecordFields.FILENAME ) );
- assertEquals( "Check document timestamp", getLastModified( artifact.getFile() ),
- document.get( MinimalIndexRecordFields.LAST_MODIFIED ) );
- assertEquals( "Check document checksum", expectedChecksum, document.get( MinimalIndexRecordFields.MD5 ) );
- assertEquals( "Check document size", artifact.getFile().length(),
- NumberTools.stringToLong( document.get( MinimalIndexRecordFields.FILE_SIZE ) ) );
- assertEquals( "Check document classes", expectedClasses, document.get( MinimalIndexRecordFields.CLASSES ) );
- }
-
- private String getLastModified( File file )
- {
- SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
- dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
- return dateFormat.format( new Date( file.lastModified() ) );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.RepositoryIndexSearchException;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecordFactory;
-import org.apache.maven.repository.indexing.record.StandardIndexRecordFields;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test the Lucene implementation of the artifact index search.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo would be nice to abstract some of the query away, but for now passing in a Lucene query directly is good enough
- */
-public class LuceneStandardArtifactIndexSearchTest
- extends PlexusTestCase
-{
- private RepositoryArtifactIndex index;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private File indexLocation;
-
- private RepositoryIndexRecordFactory recordFactory;
-
- private Map records = new HashMap();
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- indexLocation = getTestFile( "target/test-index" );
-
- FileUtils.deleteDirectory( indexLocation );
-
- index = factory.createStandardIndex( indexLocation );
-
- records.put( "test-jar", recordFactory.createRecord( createArtifact( "test-jar" ) ) );
- records.put( "test-jar-jdk14",
- recordFactory.createRecord( createArtifact( "test-jar", "1.0", "jar", "jdk14" ) ) );
- records.put( "test-jar-and-pom",
- recordFactory.createRecord( createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" ) ) );
- records.put( "test-jar-and-pom-jdk14", recordFactory.createRecord(
- createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" ) ) );
- records.put( "test-child-pom",
- recordFactory.createRecord( createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" ) ) );
- records.put( "test-archetype", recordFactory.createRecord( createArtifact( "test-archetype" ) ) );
- records.put( "test-plugin", recordFactory.createRecord( createArtifact( "test-plugin" ) ) );
- records.put( "test-pom", recordFactory.createRecord( createArtifact( "test-pom", "1.0", "pom" ) ) );
- records.put( "parent-pom", recordFactory.createRecord( createArtifact( "parent-pom", "1", "pom" ) ) );
- records.put( "test-dll", recordFactory.createRecord( createArtifact( "test-dll", "1.0.1.34", "dll" ) ) );
-
- index.indexRecords( records.values() );
- }
-
- public void testExactMatchVersion()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.VERSION_EXACT, "1.0" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.VERSION_EXACT, "1.0-SNAPSHOT" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.VERSION_EXACT, "1.0-20060728.121314-1" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.VERSION_EXACT, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchBaseVersion()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-SNAPSHOT" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION_EXACT, "1.0-20060728.121314-1" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION_EXACT, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchGroupId()
- throws RepositoryIndexSearchException
- {
- Query query =
- new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven.repository.record" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
- // test partial match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, "org.apache.maven" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchArtifactId()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, "test-jar" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- // test partial match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, "test" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchType()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "maven-plugin" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "jar" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "dll" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "maven-archetype" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.TYPE, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchPackaging()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "maven-plugin" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "jar" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 4, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "dll" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "maven-archetype" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.PACKAGING, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchPluginPrefix()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.PLUGIN_PREFIX, "test" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.PLUGIN_PREFIX, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchRepository()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.REPOSITORY, "test" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.REPOSITORY, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchMd5()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.MD5, "3a0adc365f849366cd8b633cad155cb7" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.MD5, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchSha1()
- throws RepositoryIndexSearchException
- {
- Query query =
- new TermQuery( new Term( StandardIndexRecordFields.SHA1, "c66f18bf192cb613fc2febb4da541a34133eedc2" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.SHA1, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testExactMatchInceptionYear()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.INCEPTION_YEAR, "2005" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertEquals( "Check results size", 3, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.INCEPTION_YEAR, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchFilename()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.FILENAME, "maven" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
-/* TODO: if this is a result we want, we need to change the analyzer. Currently, it is tokenizing it as plugin-1.0 and plugin/1.0 in the path
- query = new TermQuery( new Term( StandardIndexRecordFields.FILENAME, "plugin" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-*/
- query = new TermQuery( new Term( StandardIndexRecordFields.FILENAME, "test" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertEquals( "Check results size", 9, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.FILENAME, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchGroupId()
- throws RepositoryIndexSearchException
- {
- Query query =
- new TermQuery( new Term( StandardIndexRecordFields.GROUPID, "org.apache.maven.repository.record" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-
-/* TODO: if we want this result, must change the analyzer to split on '.'
- query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID, "maven" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertEquals( "Check results size", 10, results.size() );
-*/
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.GROUPID, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchArtifactId()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID, "plugin" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID, "test" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertEquals( "Check results size", 9, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID, "maven" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchVersion()
- throws RepositoryIndexSearchException
- {
- // If partial matches are desired, need to change the analyzer for versions to split on '.'
- Query query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "1" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "1.0" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
-/* TODO: need to change analyzer to split on - if we want this
- query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "snapshot" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "alpha" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-*/
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.VERSION, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchBaseVersion()
- throws RepositoryIndexSearchException
- {
- // If partial matches are desired, need to change the analyzer for versions to split on '.'
- Query query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "1" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "1.0" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-archetype" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
-/* TODO: need to change analyzer to split on - if we want this
- query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "snapshot" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "alpha" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-*/
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.BASE_VERSION, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchClassifier()
- throws RepositoryIndexSearchException
- {
- BooleanQuery bQuery = new BooleanQuery();
- bQuery.add( new MatchAllDocsQuery(), BooleanClause.Occur.MUST );
- bQuery.add( new TermQuery( new Term( StandardIndexRecordFields.CLASSIFIER, "jdk14" ) ),
- BooleanClause.Occur.MUST_NOT );
- List results = index.search( new LuceneQuery( bQuery ) );
-
- assertFalse( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 8, results.size() );
-
- // TODO: can we search for "anything with no classifier" ?
-
- Query query = new TermQuery( new Term( StandardIndexRecordFields.CLASSIFIER, "jdk14" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.CLASSIFIER, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchClass()
- throws RepositoryIndexSearchException
- {
- // TODO: should be preserving case!
- Query query = new TermQuery( new Term( StandardIndexRecordFields.CLASSES, "b.c.c" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 5, results.size() );
-
-/* TODO!: need to change the analyzer if we want partial classes (split on '.')
- query = new TermQuery( new Term( StandardIndexRecordFields.CLASSES, "C" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-jar" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-jdk14" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-jar-and-pom-jdk14" ) ) );
- assertEquals( "Check results size", 4, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.CLASSES, "MyMojo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-*/
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.CLASSES, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchFiles()
- throws RepositoryIndexSearchException
- {
- // TODO: should be preserving case!
- Query query = new TermQuery( new Term( StandardIndexRecordFields.FILES, "manifest.mf" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 7, results.size() );
-
-/*
- // TODO: should be preserving case, and '-inf'!
- query = new TermQuery( new Term( StandardIndexRecordFields.FILES, "meta-inf" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-dll" ) ) );
- assertEquals( "Check results size", 7, results.size() );
-*/
-
- query = new TermQuery( new Term( StandardIndexRecordFields.FILES, "plugin.xml" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.FILES, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchProjectName()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_NAME, "mojo" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-plugin" ) ) );
- assertEquals( "Check results size", 1, results.size() );
-
- query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_NAME, "maven" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertFalse( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertFalse( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertEquals( "Check results size", 2, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_NAME, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- public void testMatchProjectDescription()
- throws RepositoryIndexSearchException
- {
- Query query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_DESCRIPTION, "description" ) );
- List results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check result", results.contains( records.get( "test-child-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "parent-pom" ) ) );
- assertTrue( "Check result", results.contains( records.get( "test-pom" ) ) );
- assertEquals( "Check results size", 3, results.size() );
-
- // test non-match fails
- query = new TermQuery( new Term( StandardIndexRecordFields.PROJECT_DESCRIPTION, "foo" ) );
- results = index.search( new LuceneQuery( query ) );
-
- assertTrue( "Check results size", results.isEmpty() );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar", null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- return createArtifact( artifactId, version, type, null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type, String classifier )
- {
- Artifact artifact = artifactFactory.createDependencyArtifact( "org.apache.maven.repository.record", artifactId,
- VersionRange.createFromVersion( version ), type,
- classifier, Artifact.SCOPE_RUNTIME );
- artifact.isSnapshot();
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.lucene;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.NumberTools;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecord;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecordFactory;
-import org.apache.maven.repository.indexing.record.StandardIndexRecordFields;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.FileUtils;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.TimeZone;
-
-/**
- * Test the Lucene implementation of the artifact index.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class LuceneStandardArtifactIndexTest
- extends PlexusTestCase
-{
- private RepositoryArtifactIndex index;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private File indexLocation;
-
- private RepositoryIndexRecordFactory recordFactory;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- recordFactory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- indexLocation = getTestFile( "target/test-index" );
-
- FileUtils.deleteDirectory( indexLocation );
-
- index = factory.createStandardIndex( indexLocation );
- }
-
- public void testIndexExists()
- throws IOException, RepositoryIndexException
- {
- assertFalse( "check index doesn't exist", index.exists() );
-
- // create empty directory
- indexLocation.mkdirs();
- assertFalse( "check index doesn't exist even if directory does", index.exists() );
-
- // create index, with no records
- createEmptyIndex();
- assertTrue( "check index is considered to exist", index.exists() );
-
- // Test non-directory
- FileUtils.deleteDirectory( indexLocation );
- indexLocation.createNewFile();
- try
- {
- index.exists();
- fail( "Index operation should fail as the location is not valid" );
- }
- catch ( RepositoryIndexException e )
- {
- // great
- }
- finally
- {
- indexLocation.delete();
- }
- }
-
- public void testAddRecordNoIndex()
- throws IOException, RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertJarRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddRecordExistingEmptyIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertJarRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddRecordInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- // Do it again
- record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertJarRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddPomRecord()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertPomRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testAddPlugin()
- throws IOException, RepositoryIndexException, XmlPullParserException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-plugin" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
-
- index.indexRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- Document document = reader.document( 0 );
- assertPluginRecord( artifact, document );
- assertEquals( "Check index size", 1, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.indexRecords( Collections.singletonList( record ) );
-
- index.deleteRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordNotInIndex()
- throws IOException, RepositoryIndexException
- {
- createEmptyIndex();
-
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
-
- index.deleteRecords( Collections.singletonList( record ) );
-
- IndexReader reader = IndexReader.open( indexLocation );
- try
- {
- assertEquals( "No documents", 0, reader.numDocs() );
- }
- finally
- {
- reader.close();
- }
- }
-
- public void testDeleteRecordNoIndex()
- throws IOException, RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = recordFactory.createRecord( artifact );
- index.deleteRecords( Collections.singleton( record ) );
-
- assertFalse( index.exists() );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar" );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- Artifact artifact =
- artifactFactory.createBuildArtifact( "org.apache.maven.repository.record", artifactId, version, type );
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-
- private void createEmptyIndex()
- throws IOException
- {
- createIndex( Collections.EMPTY_LIST );
- }
-
- private void createIndex( List docments )
- throws IOException
- {
- IndexWriter writer = new IndexWriter( indexLocation, new StandardAnalyzer(), true );
- for ( Iterator i = docments.iterator(); i.hasNext(); )
- {
- Document document = (Document) i.next();
- writer.addDocument( document );
- }
- writer.optimize();
- writer.close();
- }
-
- private void assertRecord( Artifact artifact, Document document, String expectedArtifactId, String expectedType,
- String expectedMd5, String expectedSha1 )
- {
- assertEquals( "Check document filename", repository.pathOf( artifact ),
- document.get( StandardIndexRecordFields.FILENAME ) );
- assertEquals( "Check document groupId", "org.apache.maven.repository.record",
- document.get( StandardIndexRecordFields.GROUPID ) );
- assertEquals( "Check document artifactId", expectedArtifactId,
- document.get( StandardIndexRecordFields.ARTIFACTID ) );
- assertEquals( "Check document version", "1.0", document.get( StandardIndexRecordFields.VERSION ) );
- assertEquals( "Check document type", expectedType, document.get( StandardIndexRecordFields.TYPE ) );
- assertEquals( "Check document repository", "test", document.get( StandardIndexRecordFields.REPOSITORY ) );
- assertEquals( "Check document timestamp", getLastModified( artifact.getFile() ),
- document.get( StandardIndexRecordFields.LAST_MODIFIED ) );
- assertEquals( "Check document md5", expectedMd5, document.get( StandardIndexRecordFields.MD5 ) );
- assertEquals( "Check document sha1", expectedSha1, document.get( StandardIndexRecordFields.SHA1 ) );
- assertEquals( "Check document file size", artifact.getFile().length(),
- NumberTools.stringToLong( document.get( StandardIndexRecordFields.FILE_SIZE ) ) );
- assertNull( "Check document classifier", document.get( StandardIndexRecordFields.CLASSIFIER ) );
- }
-
- private void assertPomRecord( Artifact artifact, Document document )
- {
- assertRecord( artifact, document, "test-pom", "pom", "32dbef7ff11eb933bd8b7e7bcab85406",
- "c3b374e394607e1e705e71c227f62641e8621ebe" );
- assertNull( "Check document classes", document.get( StandardIndexRecordFields.CLASSES ) );
- assertNull( "Check document files", document.get( StandardIndexRecordFields.FILES ) );
- assertNull( "Check document pluginPrefix", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
- assertEquals( "Check document year", "2005", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
- assertEquals( "Check document project name", "Maven Repository Manager Test POM",
- document.get( StandardIndexRecordFields.PROJECT_NAME ) );
- assertEquals( "Check document project description", "Description",
- document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
- assertEquals( "Check document packaging", "pom", document.get( StandardIndexRecordFields.PACKAGING ) );
- }
-
- private void assertJarRecord( Artifact artifact, Document document )
- {
- assertRecord( artifact, document, "test-jar", "jar", "3a0adc365f849366cd8b633cad155cb7",
- "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- assertEquals( "Check document classes", "A\nb.B\nb.c.C", document.get( StandardIndexRecordFields.CLASSES ) );
- assertEquals( "Check document files", "META-INF/MANIFEST.MF\nA.class\nb/B.class\nb/c/C.class",
- document.get( StandardIndexRecordFields.FILES ) );
- assertNull( "Check document inceptionYear", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
- assertNull( "Check document projectName", document.get( StandardIndexRecordFields.PROJECT_NAME ) );
- assertNull( "Check document projectDesc", document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
- assertNull( "Check document pluginPrefix", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
- assertNull( "Check document packaging", document.get( StandardIndexRecordFields.PACKAGING ) );
- }
-
- private void assertPluginRecord( Artifact artifact, Document document )
- {
- assertRecord( artifact, document, "test-plugin", "maven-plugin", "06f6fe25e46c4d4fb5be4f56a9bab0ee",
- "382c1ebfb5d0c7d6061c2f8569fb53f8fc00fec2" );
- assertEquals( "Check document classes", "org.apache.maven.repository.record.MyMojo",
- document.get( StandardIndexRecordFields.CLASSES ) );
- assertEquals( "Check document files", "META-INF/MANIFEST.MF\n" + "META-INF/maven/plugin.xml\n" +
- "org/apache/maven/repository/record/MyMojo.class\n" +
- "META-INF/maven/org.apache.maven.repository.record/test-plugin/pom.xml\n" +
- "META-INF/maven/org.apache.maven.repository.record/test-plugin/pom.properties",
- document.get( StandardIndexRecordFields.FILES ) );
- assertEquals( "Check document pluginPrefix", "test", document.get( StandardIndexRecordFields.PLUGIN_PREFIX ) );
- assertEquals( "Check document packaging", "maven-plugin", document.get( StandardIndexRecordFields.PACKAGING ) );
- assertNull( "Check document inceptionYear", document.get( StandardIndexRecordFields.INCEPTION_YEAR ) );
- assertEquals( "Check document project name", "Maven Mojo Archetype",
- document.get( StandardIndexRecordFields.PROJECT_NAME ) );
- assertNull( "Check document projectDesc", document.get( StandardIndexRecordFields.PROJECT_DESCRIPTION ) );
- }
-
- private String getLastModified( File file )
- {
- SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMddHHmmss", Locale.US );
- dateFormat.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
- return dateFormat.format( new Date( file.lastModified() ) );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.query;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import junit.framework.TestCase;
-
-import java.util.Iterator;
-
-/**
- * @author Brett Porter
- */
-public class QueryTest
- extends TestCase
-{
- private QueryTerm term1 = new QueryTerm( "field1", "value1" );
-
- private QueryTerm term2 = new QueryTerm( "field2", "value2" );
-
- private QueryTerm term3 = new QueryTerm( "field3", "value3" );
-
- public void testQueryTerm()
- {
- QueryTerm query = new QueryTerm( "Field", "Value" );
- assertEquals( "check field setting", "Field", query.getField() );
- assertEquals( "check value setting", "Value", query.getValue() );
- }
-
- public void testSingleTermQuery()
- {
- SingleTermQuery query = new SingleTermQuery( "Field", "Value" );
- assertEquals( "check field setting", "Field", query.getField() );
- assertEquals( "check value setting", "Value", query.getValue() );
-
- query = new SingleTermQuery( term1 );
- assertEquals( "check field setting", "field1", query.getField() );
- assertEquals( "check value setting", "value1", query.getValue() );
- }
-
- public void testRangeQueryOpen()
- {
- RangeQuery rangeQuery = RangeQuery.createOpenRange();
- assertNull( "Check range has no start", rangeQuery.getBegin() );
- assertNull( "Check range has no end", rangeQuery.getEnd() );
- }
-
- public void testRangeQueryExclusive()
- {
- RangeQuery rangeQuery = RangeQuery.createExclusiveRange( term1, term2 );
- assertEquals( "Check range start", term1, rangeQuery.getBegin() );
- assertEquals( "Check range end", term2, rangeQuery.getEnd() );
- assertFalse( "Check exclusive", rangeQuery.isInclusive() );
- }
-
- public void testRangeQueryInclusive()
- {
- RangeQuery rangeQuery = RangeQuery.createInclusiveRange( term1, term2 );
- assertEquals( "Check range start", term1, rangeQuery.getBegin() );
- assertEquals( "Check range end", term2, rangeQuery.getEnd() );
- assertTrue( "Check inclusive", rangeQuery.isInclusive() );
- }
-
- public void testRangeQueryOpenEnded()
- {
- RangeQuery rangeQuery = RangeQuery.createGreaterThanOrEqualToRange( term1 );
- assertEquals( "Check range start", term1, rangeQuery.getBegin() );
- assertNull( "Check range end", rangeQuery.getEnd() );
- assertTrue( "Check inclusive", rangeQuery.isInclusive() );
-
- rangeQuery = RangeQuery.createGreaterThanRange( term1 );
- assertEquals( "Check range start", term1, rangeQuery.getBegin() );
- assertNull( "Check range end", rangeQuery.getEnd() );
- assertFalse( "Check exclusive", rangeQuery.isInclusive() );
-
- rangeQuery = RangeQuery.createLessThanOrEqualToRange( term1 );
- assertNull( "Check range start", rangeQuery.getBegin() );
- assertEquals( "Check range end", term1, rangeQuery.getEnd() );
- assertTrue( "Check inclusive", rangeQuery.isInclusive() );
-
- rangeQuery = RangeQuery.createLessThanRange( term1 );
- assertNull( "Check range start", rangeQuery.getBegin() );
- assertEquals( "Check range end", term1, rangeQuery.getEnd() );
- assertFalse( "Check exclusive", rangeQuery.isInclusive() );
- }
-
- public void testCompundQuery()
- {
- CompoundQuery query = new CompoundQuery();
- assertTrue( "check query is empty", query.getCompoundQueryTerms().isEmpty() );
-
- query.and( term1 );
- query.or( term2 );
- query.not( term3 );
-
- Iterator i = query.getCompoundQueryTerms().iterator();
- CompoundQueryTerm term = (CompoundQueryTerm) i.next();
- assertEquals( "Check first term", "field1", getQuery( term ).getField() );
- assertEquals( "Check first term", "value1", getQuery( term ).getValue() );
- assertTrue( "Check first term", term.isRequired() );
- assertFalse( "Check first term", term.isProhibited() );
-
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check second term", "field2", getQuery( term ).getField() );
- assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
- assertFalse( "Check second term", term.isRequired() );
- assertFalse( "Check second term", term.isProhibited() );
-
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check third term", "field3", getQuery( term ).getField() );
- assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
- assertFalse( "Check third term", term.isRequired() );
- assertTrue( "Check third term", term.isProhibited() );
-
- CompoundQuery query2 = new CompoundQuery();
- query2.and( query );
- query2.or( new SingleTermQuery( term2 ) );
- query2.not( new SingleTermQuery( term3 ) );
-
- i = query2.getCompoundQueryTerms().iterator();
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check first term", query, term.getQuery() );
- assertTrue( "Check first term", term.isRequired() );
- assertFalse( "Check first term", term.isProhibited() );
-
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check second term", "field2", getQuery( term ).getField() );
- assertEquals( "Check second term", "value2", getQuery( term ).getValue() );
- assertFalse( "Check second term", term.isRequired() );
- assertFalse( "Check second term", term.isProhibited() );
-
- term = (CompoundQueryTerm) i.next();
- assertEquals( "Check third term", "field3", getQuery( term ).getField() );
- assertEquals( "Check third term", "value3", getQuery( term ).getValue() );
- assertFalse( "Check third term", term.isRequired() );
- assertTrue( "Check third term", term.isProhibited() );
- }
-
- private static SingleTermQuery getQuery( CompoundQueryTerm term )
- {
- return (SingleTermQuery) term.getQuery();
- }
-}
-
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Test the minimal artifact index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class MinimalArtifactIndexRecordFactoryTest
- extends PlexusTestCase
-{
- private RepositoryIndexRecordFactory factory;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private static final String TEST_GROUP_ID = "org.apache.maven.repository.record";
-
- private static final List JAR_CLASS_LIST = Arrays.asList( new String[]{"A", "b.B", "b.c.C"} );
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- factory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "minimal" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
- }
-
- public void testIndexedJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarWithClassifier()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar", "1.0", "jar", "jdk14" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarAndPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarAndPomWithClassifier()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
- }
-
- public void testNonIndexedPom()
- throws RepositoryIndexException
- {
- // If we pass in only the POM that belongs to a JAR, then expect null not the POM
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "pom" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-plugin", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-archetype", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
- }
-
- public void testIndexedPlugin()
- throws RepositoryIndexException, IOException, XmlPullParserException
- {
- Artifact artifact = createArtifact( "test-plugin" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- MinimalArtifactIndexRecord expectedRecord = new MinimalArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "06f6fe25e46c4d4fb5be4f56a9bab0ee" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( Collections.singletonList( "org.apache.maven.repository.record.MyMojo" ) );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testCorruptJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-corrupt-jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- public void testNonJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-dll", "1.0.1.34", "dll" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- public void testMissingFile()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-foo" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar" );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- return createArtifact( artifactId, version, type, null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type, String classifier )
- {
- Artifact artifact = artifactFactory.createDependencyArtifact( TEST_GROUP_ID, artifactId,
- VersionRange.createFromVersion( version ), type,
- classifier, Artifact.SCOPE_RUNTIME );
- artifact.isSnapshot();
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.indexing.record;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Test the minimal artifact index record.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class StandardArtifactIndexRecordFactoryTest
- extends PlexusTestCase
-{
- private RepositoryIndexRecordFactory factory;
-
- private ArtifactRepository repository;
-
- private ArtifactFactory artifactFactory;
-
- private static final String TEST_GROUP_ID = "org.apache.maven.repository.record";
-
- private static final List JAR_CLASS_LIST = Arrays.asList( new String[]{"A", "b.B", "b.c.C"} );
-
- private static final List JAR_FILE_LIST =
- Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "A.class", "b/B.class", "b/c/C.class"} );
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- factory = (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- ArtifactRepositoryFactory repositoryFactory =
- (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File file = getTestFile( "src/test/managed-repository" );
- repository =
- repositoryFactory.createArtifactRepository( "test", file.toURI().toURL().toString(), layout, null, null );
- }
-
- public void testIndexedJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-jar" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarWithClassifier()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar", "1.0", "jar", "jdk14" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-jar" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setClassifier( "jdk14" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarAndPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-jar-and-pom" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0-alpha-1" );
- expectedRecord.setVersion( "1.0-alpha-1" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Test JAR and POM" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarAndPomWithClassifier()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0-alpha-1", "jar", "jdk14" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-jar-and-pom" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0-alpha-1" );
- expectedRecord.setVersion( "1.0-alpha-1" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Test JAR and POM" );
- expectedRecord.setClassifier( "jdk14" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedJarWithParentPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-child-pom", "1.0-20060728.121314-1", "jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "3a0adc365f849366cd8b633cad155cb7" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setClasses( JAR_CLASS_LIST );
- expectedRecord.setArtifactId( "test-child-pom" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0-SNAPSHOT" );
- expectedRecord.setVersion( "1.0-20060728.121314-1" );
- expectedRecord.setFiles( JAR_FILE_LIST );
- expectedRecord.setSha1Checksum( "c66f18bf192cb613fc2febb4da541a34133eedc2" );
- expectedRecord.setType( "jar" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Child Project" );
- expectedRecord.setProjectDescription( "Description" );
- expectedRecord.setInceptionYear( "2005" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedPom()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "32dbef7ff11eb933bd8b7e7bcab85406" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-pom" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setSha1Checksum( "c3b374e394607e1e705e71c227f62641e8621ebe" );
- expectedRecord.setType( "pom" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setPackaging( "pom" );
- expectedRecord.setInceptionYear( "2005" );
- expectedRecord.setProjectName( "Maven Repository Manager Test POM" );
- expectedRecord.setProjectDescription( "Description" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testNonIndexedPom()
- throws RepositoryIndexException
- {
- // If we pass in only the POM that belongs to a JAR, then expect null not the POM
- Artifact artifact = createArtifact( "test-jar-and-pom", "1.0", "pom" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-plugin", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
-
- artifact = createArtifact( "test-archetype", "1.0", "pom" );
-
- record = factory.createRecord( artifact );
-
- assertNull( "Check no record", record );
- }
-
- public void testIndexedPlugin()
- throws RepositoryIndexException, IOException, XmlPullParserException
- {
- Artifact artifact = createArtifact( "test-plugin" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "06f6fe25e46c4d4fb5be4f56a9bab0ee" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-plugin" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setSha1Checksum( "382c1ebfb5d0c7d6061c2f8569fb53f8fc00fec2" );
- expectedRecord.setType( "maven-plugin" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setClasses( Arrays.asList( new String[]{"org.apache.maven.repository.record.MyMojo"} ) );
- expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "META-INF/maven/plugin.xml",
- "org/apache/maven/repository/record/MyMojo.class",
- "META-INF/maven/org.apache.maven.repository.record/test-plugin/pom.xml",
- "META-INF/maven/org.apache.maven.repository.record/test-plugin/pom.properties"} ) );
- expectedRecord.setPackaging( "maven-plugin" );
- expectedRecord.setProjectName( "Maven Mojo Archetype" );
- expectedRecord.setPluginPrefix( "test" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testIndexedArchetype()
- throws RepositoryIndexException, IOException, XmlPullParserException
- {
- Artifact artifact = createArtifact( "test-archetype" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "ecefd4674c75a175119572b19edc45f1" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-archetype" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0" );
- expectedRecord.setVersion( "1.0" );
- expectedRecord.setSha1Checksum( "5ebabafdbcd6684ae434c06e22c32844df284b05" );
- expectedRecord.setType( "maven-archetype" );
- expectedRecord.setRepository( "test" );
- expectedRecord.setFiles( Arrays.asList( new String[]{"META-INF/MANIFEST.MF", "archetype-resources/pom.xml",
- "archetype-resources/src/main/java/App.java", "archetype-resources/src/test/java/AppTest.java",
- "META-INF/maven/archetype.xml", "META-INF/maven/org.apache.maven.repository.record/test-archetype/pom.xml",
- "META-INF/maven/org.apache.maven.repository.record/test-archetype/pom.properties"} ) );
- expectedRecord.setPackaging( "jar" );
- expectedRecord.setProjectName( "Archetype - test-archetype" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testCorruptJar()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-corrupt-jar" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- public void testDll()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-dll", "1.0.1.34", "dll" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- StandardArtifactIndexRecord expectedRecord = new StandardArtifactIndexRecord();
- expectedRecord.setMd5Checksum( "d41d8cd98f00b204e9800998ecf8427e" );
- expectedRecord.setFilename( repository.pathOf( artifact ) );
- expectedRecord.setLastModified( artifact.getFile().lastModified() );
- expectedRecord.setSize( artifact.getFile().length() );
- expectedRecord.setArtifactId( "test-dll" );
- expectedRecord.setGroupId( TEST_GROUP_ID );
- expectedRecord.setBaseVersion( "1.0.1.34" );
- expectedRecord.setVersion( "1.0.1.34" );
- expectedRecord.setSha1Checksum( "da39a3ee5e6b4b0d3255bfef95601890afd80709" );
- expectedRecord.setType( "dll" );
- expectedRecord.setRepository( "test" );
-
- assertEquals( "check record", expectedRecord, record );
- }
-
- public void testMissingFile()
- throws RepositoryIndexException
- {
- Artifact artifact = createArtifact( "test-foo" );
-
- RepositoryIndexRecord record = factory.createRecord( artifact );
-
- assertNull( "Confirm no record is returned", record );
- }
-
- private Artifact createArtifact( String artifactId )
- {
- return createArtifact( artifactId, "1.0", "jar" );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type )
- {
- return createArtifact( artifactId, version, type, null );
- }
-
- private Artifact createArtifact( String artifactId, String version, String type, String classifier )
- {
- Artifact artifact = artifactFactory.createDependencyArtifact( TEST_GROUP_ID, artifactId,
- VersionRange.createFromVersion( version ), type,
- classifier, Artifact.SCOPE_RUNTIME );
- artifact.isSnapshot();
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- artifact.setRepository( repository );
- return artifact;
- }
-}
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>parent-pom</artifactId>
+ <version>1</version>
+ <packaging>pom</packaging>
+ <name>Test Parent POM</name>
+ <description>Description</description>
+ <inceptionYear>2005</inceptionYear>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <modules>
+ <module>test-child-pom</module>
+ </modules>
+</project>
+
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-archetype</artifactId>
+ <version>1.0</version>
+ <name>Archetype - test-archetype</name>
+</project>
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>parent-pom</artifactId>
+ <version>1</version>
+ </parent>
+ <artifactId>test-child-pom</artifactId>
+ <version>1.0-20060731-121314-1</version>
+ <name>Child Project</name>
+</project>
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-jar-and-pom</artifactId>
+ <version>1.0-alpha-1</version>
+ <name>Test JAR and POM</name>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-plugin</artifactId>
+ <packaging>maven-plugin</packaging>
+ <version>1.0</version>
+ <name>Maven Mojo Archetype</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-plugin-api</artifactId>
+ <version>2.0</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.maven.archiva.record</groupId>
+ <artifactId>test-pom</artifactId>
+ <version>1.0</version>
+ <name>Maven Repository Manager Test POM</name>
+ <inceptionYear>2005</inceptionYear>
+ <description>Description</description>
+ <packaging>pom</packaging>
+</project>
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.repository.record</groupId>
- <artifactId>parent-pom</artifactId>
- <version>1</version>
- <packaging>pom</packaging>
- <name>Test Parent POM</name>
- <description>Description</description>
- <inceptionYear>2005</inceptionYear>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
- <modules>
- <module>test-child-pom</module>
- </modules>
-</project>
-
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.repository.record</groupId>
- <artifactId>test-archetype</artifactId>
- <version>1.0</version>
- <name>Archetype - test-archetype</name>
-</project>
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.maven.repository.record</groupId>
- <artifactId>parent-pom</artifactId>
- <version>1</version>
- </parent>
- <artifactId>test-child-pom</artifactId>
- <version>1.0-20060731-121314-1</version>
- <name>Child Project</name>
-</project>
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.repository.record</groupId>
- <artifactId>test-jar-and-pom</artifactId>
- <version>1.0-alpha-1</version>
- <name>Test JAR and POM</name>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.repository.record</groupId>
- <artifactId>test-plugin</artifactId>
- <packaging>maven-plugin</packaging>
- <version>1.0</version>
- <name>Maven Mojo Archetype</name>
- <dependencies>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-plugin-api</artifactId>
- <version>2.0</version>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.repository.record</groupId>
- <artifactId>test-pom</artifactId>
- <version>1.0</version>
- <name>Maven Repository Manager Test POM</name>
- <inceptionYear>2005</inceptionYear>
- <description>Description</description>
- <packaging>pom</packaging>
-</project>
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.digest.DigestUtils;
+import org.apache.maven.archiva.digest.DigesterException;
+import org.apache.maven.archiva.discovery.ArtifactDiscoverer;
+import org.apache.maven.archiva.discovery.DiscovererException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
+import org.apache.maven.wagon.ConnectionException;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authentication.AuthenticationException;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.apache.maven.wagon.observers.ChecksumObserver;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.apache.maven.wagon.repository.Repository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.util.FileUtils;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.security.NoSuchAlgorithmException;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An implementation of the proxy handler. This class is not thread safe (the class itself is, but the wagons it uses
+ * are not) - it is declared <code>per-lookup</code> for that reason.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component instantiation-strategy="per-lookup"
+ * @todo use wagonManager for cache use file:// as URL
+ * @todo this currently duplicates a lot of the wagon manager, and doesn't do things like snapshot resolution, etc.
+ * The checksum handling is inconsistent with that of the wagon manager.
+ * Should we have a more artifact based one? This will merge metadata so should behave correctly, and it is able to
+ * correct some limitations of the wagon manager (eg, it can retrieve newer SNAPSHOT files without metadata)
+ */
+public class DefaultProxyRequestHandler
+ extends AbstractLogEnabled
+ implements ProxyRequestHandler
+{
+ /**
+ * @plexus.requirement role-hint="default"
+ * @todo use a map, and have priorities in them
+ */
+ private ArtifactDiscoverer defaultArtifactDiscoverer;
+
+ /**
+ * @plexus.requirement role-hint="legacy"
+ */
+ private ArtifactDiscoverer legacyArtifactDiscoverer;
+
+ /**
+ * @plexus.requirement role="org.apache.maven.wagon.Wagon"
+ */
+ private Map/*<String,Wagon>*/ wagons;
+
+ public File get( String path, List proxiedRepositories, ArtifactRepository managedRepository )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ return get( path, proxiedRepositories, managedRepository, null );
+ }
+
+ public File get( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ return get( managedRepository, path, proxiedRepositories, wagonProxy, false );
+ }
+
+ public File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ return getAlways( path, proxiedRepositories, managedRepository, null );
+ }
+
+ public File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository,
+ ProxyInfo wagonProxy )
+ throws ResourceDoesNotExistException, ProxyException
+ {
+ return get( managedRepository, path, proxiedRepositories, wagonProxy, true );
+ }
+
+ private File get( ArtifactRepository managedRepository, String path, List proxiedRepositories, ProxyInfo wagonProxy,
+ boolean force )
+ throws ProxyException, ResourceDoesNotExistException
+ {
+ File target = new File( managedRepository.getBasedir(), path );
+
+ for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
+ {
+ ProxiedArtifactRepository repository = (ProxiedArtifactRepository) i.next();
+
+ if ( !force && repository.isCachedFailure( path ) )
+ {
+ processCachedRepositoryFailure( repository, "Cached failure found for: " + path );
+ }
+ else
+ {
+ get( path, target, repository, managedRepository, wagonProxy, force );
+ }
+ }
+
+ if ( !target.exists() )
+ {
+ throw new ResourceDoesNotExistException( "Could not find " + path + " in any of the repositories." );
+ }
+
+ return target;
+ }
+
+ /**
+ * Attempt to populate <code>target</code> from a single proxied repository, honouring that
+ * repository's update policies. Checksum files are never fetched remotely, metadata is fetched
+ * to a side file and merged into the managed copy, and artifacts are only re-fetched when out
+ * of date or when <code>force</code> is set.
+ *
+ * @param path the repository path being requested
+ * @param target the file in the managed repository to populate
+ * @param repository the proxied repository to try
+ * @param managedRepository the local repository acting as the cache
+ * @param wagonProxy the network proxy to use for transfers, if any
+ * @param force whether to bypass local copies and policy checks
+ * @throws ProxyException when the download fails and the repository is configured to hard fail
+ */
+ private void get( String path, File target, ProxiedArtifactRepository repository,
+ ArtifactRepository managedRepository, ProxyInfo wagonProxy, boolean force )
+ throws ProxyException
+ {
+ ArtifactRepositoryPolicy policy;
+
+ if ( path.endsWith( ".md5" ) || path.endsWith( ".sha1" ) )
+ {
+ // always read from the managed repository, no need to make remote request
+ }
+ else if ( path.endsWith( "maven-metadata.xml" ) )
+ {
+ // metadata goes to a per-repository side file first, then gets merged into the shared target
+ File metadataFile = new File( target.getParentFile(), ".metadata-" + repository.getRepository().getId() );
+
+ policy = repository.getRepository().getReleases();
+
+ // if it is snapshot metadata, use a different policy
+ if ( path.endsWith( "-SNAPSHOT/maven-metadata.xml" ) )
+ {
+ policy = repository.getRepository().getSnapshots();
+ }
+
+ if ( force || !metadataFile.exists() || isOutOfDate( policy, metadataFile ) )
+ {
+ getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, metadataFile,
+ policy, force );
+
+ mergeMetadataFiles( target, metadataFile );
+ }
+ }
+ else
+ {
+ // interpret the path as an artifact: first with the default (m2) layout, then the legacy (m1) one
+ Artifact artifact = null;
+ try
+ {
+ artifact = defaultArtifactDiscoverer.buildArtifact( path );
+ }
+ catch ( DiscovererException e )
+ {
+ getLogger().debug( "Failed to build artifact using default layout with message: " + e.getMessage() );
+ }
+
+ if ( artifact == null )
+ {
+ try
+ {
+ artifact = legacyArtifactDiscoverer.buildArtifact( path );
+ }
+ catch ( DiscovererException e )
+ {
+ getLogger().debug( "Failed to build artifact using legacy layout with message: " + e.getMessage() );
+ }
+ }
+
+ if ( artifact != null )
+ {
+ ArtifactRepository artifactRepository = repository.getRepository();
+
+ // we use the release policy for tracking failures, but only check for updates on snapshots
+ // also, we don't look for updates on timestamp snapshot files, only non-unique-version ones
+ policy = artifact.isSnapshot() ? artifactRepository.getSnapshots() : artifactRepository.getReleases();
+
+ boolean needsUpdate = false;
+ if ( artifact.getVersion().endsWith( "-SNAPSHOT" ) && isOutOfDate( policy, target ) )
+ {
+ needsUpdate = true;
+ }
+
+ if ( needsUpdate || force || !target.exists() )
+ {
+ getFileFromRepository( artifactRepository.pathOf( artifact ), repository,
+ managedRepository.getBasedir(), wagonProxy, target, policy, force );
+ }
+ }
+ else
+ {
+ // Some other unknown file in the repository, proxy as is
+ if ( force || !target.exists() )
+ {
+ policy = repository.getRepository().getReleases();
+ getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, target, policy,
+ force );
+ }
+ }
+ }
+
+ if ( target.exists() )
+ {
+ // in case it previously failed and we've since found it
+ repository.clearFailure( path );
+ }
+ }
+
+ /**
+ * Merge a newly downloaded per-repository metadata file into the managed repository's
+ * metadata, writing the merged result back to <code>target</code> only when the merge
+ * actually changed it. Problems reading the new (downloaded) file are logged and ignored;
+ * problems reading the existing target are fatal.
+ *
+ * @param target the merged metadata file in the managed repository (may not exist yet)
+ * @param metadataFile the just-downloaded metadata for a single proxied repository
+ * @throws ProxyException if the existing target metadata cannot be read or parsed
+ */
+ private void mergeMetadataFiles( File target, File metadataFile )
+ throws ProxyException
+ {
+ // NOTE(review): FileReader/FileWriter use the platform default encoding rather than the
+ // XML's declared encoding - confirm this is acceptable for repository metadata.
+ MetadataXpp3Reader reader = new MetadataXpp3Reader();
+ if ( metadataFile.exists() )
+ {
+ Metadata metadata = null;
+ if ( target.exists() )
+ {
+ FileReader fileReader = null;
+ try
+ {
+ fileReader = new FileReader( target );
+ metadata = reader.read( fileReader );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new ProxyException( "Unable to parse existing metadata: " + e.getMessage(), e );
+ }
+ catch ( IOException e )
+ {
+ throw new ProxyException( "Unable to read existing metadata: " + e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( fileReader );
+ }
+ }
+
+ FileReader fileReader = null;
+ boolean changed = false;
+ try
+ {
+ fileReader = new FileReader( metadataFile );
+ Metadata newMetadata = reader.read( fileReader );
+
+ if ( metadata != null )
+ {
+ changed = metadata.merge( newMetadata );
+ }
+ else
+ {
+ // no existing metadata - adopt the downloaded copy wholesale
+ metadata = newMetadata;
+ changed = true;
+ }
+ }
+ catch ( IOException e )
+ {
+ // ignore the merged file
+ getLogger().warn( "Unable to read new metadata: " + e.getMessage() );
+ }
+ catch ( XmlPullParserException e )
+ {
+ // ignore the merged file
+ getLogger().warn( "Unable to parse new metadata: " + e.getMessage() );
+ }
+ finally
+ {
+ IOUtil.close( fileReader );
+ }
+
+ if ( changed )
+ {
+ FileWriter fileWriter = null;
+ try
+ {
+ fileWriter = new FileWriter( target );
+ new MetadataXpp3Writer().write( fileWriter, metadata );
+ }
+ catch ( IOException e )
+ {
+ getLogger().warn( "Unable to store new metadata: " + e.getMessage() );
+ }
+ finally
+ {
+ IOUtil.close( fileWriter );
+ }
+ }
+ }
+ }
+
+ /**
+ * Download a single path from one proxied repository into <code>target</code>, verifying the
+ * remote checksums and retrying once on a checksum mismatch. Downloads go to a temporary
+ * <code>.tmp</code> sibling file which is only promoted to the target on success.
+ *
+ * @param path the repository path to download
+ * @param repository the proxied repository to download from
+ * @param repositoryCachePath base directory of the managed repository cache
+ * @param httpProxy network proxy to use when the repository is configured for one (may be null)
+ * @param target the destination file in the managed repository
+ * @param policy the update policy in effect; the download is skipped entirely when disabled
+ * @param force whether to download even if the target already exists
+ * @throws ProxyException on an unsupported protocol, or on failures against a hard-failing repository
+ */
+ private void getFileFromRepository( String path, ProxiedArtifactRepository repository, String repositoryCachePath,
+ ProxyInfo httpProxy, File target, ArtifactRepositoryPolicy policy,
+ boolean force )
+ throws ProxyException
+ {
+ if ( !policy.isEnabled() )
+ {
+ getLogger().debug( "Skipping disabled repository " + repository.getName() );
+ return;
+ }
+
+ Map checksums = null;
+ Wagon wagon = null;
+
+ // download to a sibling .tmp file so a partial transfer never replaces a good target
+ File temp = new File( target.getAbsolutePath() + ".tmp" );
+ temp.deleteOnExit();
+
+ boolean connected = false;
+ try
+ {
+ String protocol = repository.getRepository().getProtocol();
+ wagon = (Wagon) wagons.get( protocol );
+ if ( wagon == null )
+ {
+ throw new ProxyException( "Unsupported remote protocol: " + protocol );
+ }
+
+ //@todo configure wagon (ssh settings, etc)
+
+ checksums = prepareChecksumListeners( wagon );
+
+ connected = connectToRepository( wagon, repository, httpProxy );
+ if ( connected )
+ {
+ int tries = 0;
+ boolean success;
+
+ do
+ {
+ tries++;
+
+ getLogger().debug( "Trying " + path + " from " + repository.getName() + "..." );
+
+ boolean downloaded = true;
+ if ( force || !target.exists() )
+ {
+ wagon.get( path, temp );
+ }
+ else
+ {
+ downloaded = wagon.getIfNewer( path, temp, target.lastModified() );
+ }
+
+ if ( downloaded )
+ {
+ success = checkChecksum( checksums, path, wagon, repositoryCachePath );
+
+ // a checksum mismatch gets exactly one retry before being recorded as a failure
+ if ( tries > 1 && !success )
+ {
+ processRepositoryFailure( repository,
+ "Checksum failures occurred while downloading " + path, path,
+ policy );
+ return;
+ }
+ }
+ else
+ {
+ // getIfNewer determined we were up to date
+ success = true;
+ }
+ }
+ while ( !success );
+
+ // temp won't exist if we called getIfNewer and it was older, but its still a successful return
+ if ( temp.exists() )
+ {
+ moveTempToTarget( temp, target );
+ }
+ }
+ //try next repository
+ }
+ catch ( TransferFailedException e )
+ {
+ processRepositoryFailure( repository, e, path, policy );
+ }
+ catch ( AuthorizationException e )
+ {
+ processRepositoryFailure( repository, e, path, policy );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // hard failure setting doesn't affect "not found".
+ getLogger().debug( "Artifact not found in repository: " + repository.getName() + ": " + e.getMessage() );
+ }
+ finally
+ {
+ temp.delete();
+
+ if ( wagon != null && checksums != null )
+ {
+ releaseChecksumListeners( wagon, checksums );
+ }
+
+ if ( connected )
+ {
+ disconnectWagon( wagon );
+ }
+ }
+ }
+
+ /**
+ * Decide whether a cached file should be refreshed under the given update policy.
+ *
+ * @param policy the update policy to consult (may be null, in which case the file is current)
+ * @param target the cached file whose modification time is checked
+ * @return whether the policy considers the file out of date
+ */
+ private static boolean isOutOfDate( ArtifactRepositoryPolicy policy, File target )
+ {
+ if ( policy == null )
+ {
+ return false;
+ }
+
+ Date lastModified = new Date( target.lastModified() );
+ return policy.checkOutOfDate( lastModified );
+ }
+
+ /**
+ * Register SHA-1 and MD5 checksum observers on the wagon as transfer listeners, so the
+ * digests of subsequent downloads can be verified.
+ *
+ * @param wagon the wagon that will perform the transfers
+ * @return the observers that were registered, keyed by checksum file extension
+ */
+ private Map prepareChecksumListeners( Wagon wagon )
+ {
+ Map observersByExtension = new LinkedHashMap();
+ try
+ {
+ ChecksumObserver sha1Observer = new ChecksumObserver( "SHA-1" );
+ wagon.addTransferListener( sha1Observer );
+ observersByExtension.put( "sha1", sha1Observer );
+
+ ChecksumObserver md5Observer = new ChecksumObserver( "MD5" );
+ wagon.addTransferListener( md5Observer );
+ observersByExtension.put( "md5", md5Observer );
+ }
+ catch ( NoSuchAlgorithmException e )
+ {
+ getLogger().error( "An error occurred while preparing checksum observers: " + e.getMessage() );
+ }
+ return observersByExtension;
+ }
+
+ /**
+ * Detach previously registered checksum observers from the wagon.
+ *
+ * @param wagon the wagon the observers were registered with
+ * @param checksumMap the observers to remove, keyed by checksum extension
+ */
+ private void releaseChecksumListeners( Wagon wagon, Map checksumMap )
+ {
+ Iterator observers = checksumMap.values().iterator();
+ while ( observers.hasNext() )
+ {
+ ChecksumObserver observer = (ChecksumObserver) observers.next();
+ wagon.removeTransferListener( observer );
+ }
+ }
+
+ /**
+ * Open a wagon connection to the proxied repository, optionally through the network proxy.
+ * Connection and authentication failures are logged rather than propagated.
+ *
+ * @param wagon the wagon to connect
+ * @param repository the proxied repository to connect to
+ * @param httpProxy the network proxy to use if the repository allows it (may be null)
+ * @return whether the connection was established
+ */
+ private boolean connectToRepository( Wagon wagon, ProxiedArtifactRepository repository, ProxyInfo httpProxy )
+ {
+ ArtifactRepository artifactRepository = repository.getRepository();
+ Repository wagonRepository = new Repository( artifactRepository.getId(), artifactRepository.getUrl() );
+
+ try
+ {
+ if ( httpProxy != null && repository.isUseNetworkProxy() )
+ {
+ wagon.connect( wagonRepository, httpProxy );
+ }
+ else
+ {
+ wagon.connect( wagonRepository );
+ }
+ return true;
+ }
+ catch ( ConnectionException e )
+ {
+ getLogger().info( "Could not connect to " + repository.getName() + ": " + e.getMessage() );
+ }
+ catch ( AuthenticationException e )
+ {
+ getLogger().info( "Could not connect to " + repository.getName() + ": " + e.getMessage() );
+ }
+
+ return false;
+ }
+
+ /**
+ * Verify the file just downloaded by the wagon against the remote <code>.sha1</code>/<code>.md5</code>
+ * files. The first checksum type that matches wins, and the matching checksum file is promoted into
+ * the managed repository cache. Individual checksum problems only move on to the next type.
+ *
+ * @param checksumMap observers keyed by checksum extension, holding the computed digests
+ * @param path the repository path of the downloaded file
+ * @param wagon the still-connected wagon used to fetch the checksum files
+ * @param repositoryCachePath base directory of the managed repository cache
+ * @return true if a checksum matched, or if no checksum files exist remotely at all
+ * @throws ProxyException if a matching checksum file cannot replace the cached copy
+ */
+ private boolean checkChecksum( Map checksumMap, String path, Wagon wagon, String repositoryCachePath )
+ throws ProxyException
+ {
+ // NOTE(review): observers are detached before fetching the .sha1/.md5 files - presumably so
+ // those transfers don't overwrite the digests captured for the main file; confirm.
+ releaseChecksumListeners( wagon, checksumMap );
+
+ boolean correctChecksum = false;
+
+ boolean allNotFound = true;
+
+ for ( Iterator i = checksumMap.keySet().iterator(); i.hasNext() && !correctChecksum; )
+ {
+ String checksumExt = (String) i.next();
+ ChecksumObserver checksum = (ChecksumObserver) checksumMap.get( checksumExt );
+ String checksumPath = path + "." + checksumExt;
+ File checksumFile = new File( repositoryCachePath, checksumPath );
+
+ File tempChecksumFile = new File( checksumFile.getAbsolutePath() + ".tmp" );
+ tempChecksumFile.deleteOnExit();
+
+ try
+ {
+ wagon.get( checksumPath, tempChecksumFile );
+
+ allNotFound = false;
+
+ // normalise the remote checksum file contents (it may contain the file name as well)
+ String remoteChecksum = DigestUtils.cleanChecksum( FileUtils.fileRead( tempChecksumFile ),
+ checksumExt.toUpperCase(),
+ path.substring( path.lastIndexOf( '/' ) ) );
+
+ String actualChecksum = checksum.getActualChecksum().toUpperCase();
+ remoteChecksum = remoteChecksum.toUpperCase();
+
+ if ( remoteChecksum.equals( actualChecksum ) )
+ {
+ moveTempToTarget( tempChecksumFile, checksumFile );
+
+ correctChecksum = true;
+ }
+ else
+ {
+ getLogger().warn(
+ "The checksum '" + actualChecksum + "' did not match the remote value: " + remoteChecksum );
+ }
+ }
+ catch ( TransferFailedException e )
+ {
+ getLogger().warn( "An error occurred during the download of " + checksumPath + ": " + e.getMessage() );
+ // do nothing try the next checksum
+
+ allNotFound = false;
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ getLogger().debug( "The checksum did not exist: " + checksumPath + "; " + e.getMessage() );
+ // do nothing try the next checksum
+ // remove it if it is present locally in case there is an old incorrect one
+ if ( checksumFile.exists() )
+ {
+ checksumFile.delete();
+ }
+ }
+ catch ( AuthorizationException e )
+ {
+ getLogger().warn( "An error occurred during the download of " + checksumPath + ": " + e.getMessage() );
+ // do nothing try the next checksum
+
+ allNotFound = false;
+ }
+ catch ( IOException e )
+ {
+ getLogger().warn( "An error occurred while reading the temporary checksum file: " + e.getMessage() );
+ // do nothing try the next checksum
+
+ allNotFound = false;
+ }
+ catch ( DigesterException e )
+ {
+ getLogger().warn( "The checksum was invalid: " + checksumPath + ": " + e.getMessage() );
+ // do nothing try the next checksum
+
+ allNotFound = false;
+ }
+ finally
+ {
+ tempChecksumFile.delete();
+ }
+ }
+ return correctChecksum || allNotFound;
+ }
+
+ /**
+ * Promote a completed temporary download to its final destination, replacing any existing
+ * file. Falls back to a copy when an atomic rename is not possible (e.g. across filesystems).
+ *
+ * @param temp the completed download file
+ * @param target the final location of the downloaded file
+ * @throws ProxyException when the temp file cannot replace the target file
+ */
+ private void moveTempToTarget( File temp, File target )
+ throws ProxyException
+ {
+ boolean existingRemoved = !target.exists() || target.delete();
+ if ( !existingRemoved )
+ {
+ throw new ProxyException( "Unable to overwrite existing target file: " + target.getAbsolutePath() );
+ }
+
+ boolean renamed = temp.renameTo( target );
+ if ( !renamed )
+ {
+ getLogger().warn( "Unable to rename tmp file to its final name... resorting to copy command." );
+
+ try
+ {
+ FileUtils.copyFile( temp, target );
+ }
+ catch ( IOException e )
+ {
+ throw new ProxyException( "Cannot copy tmp file to its final location", e );
+ }
+ finally
+ {
+ temp.delete();
+ }
+ }
+ }
+
+ /**
+ * Disconnect the wagon from its repository. Failures are logged and swallowed, since by
+ * this point the transfer itself has already completed or failed.
+ *
+ * @param wagon the connected wagon
+ */
+ private void disconnectWagon( Wagon wagon )
+ {
+ try
+ {
+ wagon.disconnect();
+ }
+ catch ( ConnectionException e )
+ {
+ getLogger().error( "Problem disconnecting from wagonManager - ignoring: " + e.getMessage() );
+ }
+ }
+
+ /**
+ * Record a transfer failure against the repository's failure cache and either abort (when the
+ * repository is configured to hard fail) or log and continue to the next repository.
+ *
+ * @param repository the proxied repository the failure occurred against
+ * @param t the cause of the failure
+ * @param path the repository path whose retrieval failed
+ * @param policy the policy controlling how long the failure is cached
+ * @throws ProxyException when the repository is configured to hard fail
+ */
+ private void processRepositoryFailure( ProxiedArtifactRepository repository, Throwable t, String path,
+ ArtifactRepositoryPolicy policy )
+ throws ProxyException
+ {
+ // record the failure exactly once; previously this was redundantly repeated on the hard-fail path
+ repository.addFailure( path, policy );
+
+ String message = t.getMessage();
+ if ( repository.isHardFail() )
+ {
+ throw new ProxyException(
+ "An error occurred in hardfailing repository " + repository.getName() + "...\n " + message, t );
+ }
+
+ getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
+ getLogger().debug( "Cause", t );
+ }
+
+ /**
+ * Record a failure against the repository's failure cache and then apply the hard-fail policy
+ * via {@link #processCachedRepositoryFailure}.
+ *
+ * @param repository the proxied repository the failure occurred against
+ * @param message a description of the failure
+ * @param path the repository path whose retrieval failed
+ * @param policy the policy controlling how long the failure is cached
+ * @throws ProxyException when the repository is configured to hard fail
+ */
+ private void processRepositoryFailure( ProxiedArtifactRepository repository, String message, String path,
+ ArtifactRepositoryPolicy policy )
+ throws ProxyException
+ {
+ repository.addFailure( path, policy );
+
+ processCachedRepositoryFailure( repository, message );
+ }
+
+ /**
+ * Apply the hard-fail policy for a failure that is (or is about to be) cached: hard-failing
+ * repositories abort the whole request, others are merely skipped with a warning.
+ *
+ * @param repository the proxied repository the failure occurred against
+ * @param message a description of the failure
+ * @throws ProxyException when the repository is configured to hard fail
+ */
+ private void processCachedRepositoryFailure( ProxiedArtifactRepository repository, String message )
+ throws ProxyException
+ {
+ if ( !repository.isHardFail() )
+ {
+ getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
+ return;
+ }
+
+ throw new ProxyException(
+ "An error occurred in hardfailing repository " + repository.getName() + "...\n " + message );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
+
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * A proxied artifact repository - contains the artifact repository and additional information about
+ * the proxied repository.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class ProxiedArtifactRepository
+{
+ /**
+ * Whether a failed lookup should be remembered and short-circuited until it expires.
+ */
+ private boolean cacheFailures;
+
+ /**
+ * Whether failures on this repository cause the whole group to fail.
+ */
+ private boolean hardFail;
+
+ /**
+ * Whether to use the network proxy for any requests.
+ */
+ private boolean useNetworkProxy;
+
+ /**
+ * The artifact repository on the other end of the proxy.
+ */
+ private final ArtifactRepository repository;
+
+ /**
+ * Previously failed paths (relative to the repository root), mapped to the time (epoch
+ * milliseconds, boxed as a Long) at which the cached failure expires.
+ */
+ private Map/*<String,Long>*/ failureCache = new HashMap/*<String,Long>*/();
+
+ /**
+ * A user friendly name for the repository.
+ */
+ private String name;
+
+ /**
+ * Create a proxied view of the given repository.
+ *
+ * @param repository the repository on the far side of the proxy
+ */
+ public ProxiedArtifactRepository( ArtifactRepository repository )
+ {
+ this.repository = repository;
+ }
+
+ /**
+ * Check if there is a previously cached failure for requesting the given path. Expired
+ * entries are evicted as a side effect of the check.
+ *
+ * @param path the path
+ * @return whether there is an unexpired cached failure
+ */
+ public boolean isCachedFailure( String path )
+ {
+ if ( !cacheFailures )
+ {
+ return false;
+ }
+
+ Long expiry = (Long) failureCache.get( path );
+ if ( expiry == null )
+ {
+ return false;
+ }
+
+ if ( System.currentTimeMillis() >= expiry.longValue() )
+ {
+ // the entry has expired - drop it so the repository will be retried
+ clearFailure( path );
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Add a failure to the cache.
+ *
+ * @param path the path that failed
+ * @param policy the policy determining when the failure should expire
+ */
+ public void addFailure( String path, ArtifactRepositoryPolicy policy )
+ {
+ failureCache.put( path, new Long( calculateExpiryTime( policy ) ) );
+ }
+
+ /**
+ * Translate a repository update policy into an absolute expiry time for a cached failure.
+ *
+ * @param policy the update policy
+ * @return the expiry time in epoch milliseconds
+ */
+ private long calculateExpiryTime( ArtifactRepositoryPolicy policy )
+ {
+ String updatePolicy = policy.getUpdatePolicy();
+
+ if ( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS.equals( updatePolicy ) )
+ {
+ // expires immediately, so the failure is effectively not cached
+ return 0;
+ }
+
+ if ( ArtifactRepositoryPolicy.UPDATE_POLICY_DAILY.equals( updatePolicy ) )
+ {
+ // expire at the upcoming midnight boundary
+ Calendar cal = Calendar.getInstance();
+ cal.set( Calendar.HOUR_OF_DAY, 0 );
+ cal.set( Calendar.MINUTE, 0 );
+ cal.set( Calendar.SECOND, 0 );
+ cal.set( Calendar.MILLISECOND, 0 );
+ cal.add( Calendar.DAY_OF_MONTH, 1 );
+ return cal.getTime().getTime();
+ }
+
+ if ( updatePolicy.startsWith( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL ) )
+ {
+ // the policy has the form "interval:<minutes>"
+ String minutes = updatePolicy.substring( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL.length() + 1 );
+ Calendar cal = Calendar.getInstance();
+ cal.add( Calendar.MINUTE, Integer.valueOf( minutes ).intValue() );
+ return cal.getTime().getTime();
+ }
+
+ // else assume "never"
+ return Long.MAX_VALUE;
+ }
+
+ /**
+ * Remove a failure.
+ *
+ * @param path the path that had previously failed
+ */
+ public void clearFailure( String path )
+ {
+ failureCache.remove( path );
+ }
+
+ public boolean isHardFail()
+ {
+ return hardFail;
+ }
+
+ public void setHardFail( boolean hardFail )
+ {
+ this.hardFail = hardFail;
+ }
+
+ public boolean isUseNetworkProxy()
+ {
+ return useNetworkProxy;
+ }
+
+ public void setUseNetworkProxy( boolean useNetworkProxy )
+ {
+ this.useNetworkProxy = useNetworkProxy;
+ }
+
+ public boolean isCacheFailures()
+ {
+ return cacheFailures;
+ }
+
+ public void setCacheFailures( boolean cacheFailures )
+ {
+ this.cacheFailures = cacheFailures;
+ }
+
+ public ArtifactRepository getRepository()
+ {
+ return repository;
+ }
+
+ public String getName()
+ {
+ return name;
+ }
+
+ public void setName( String name )
+ {
+ this.name = name;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @author Edwin Punzalan
+ */
+public class ProxyException
+ extends Exception
+{
+ /**
+ * Create an exception with a descriptive message only.
+ *
+ * @param message description of the proxy failure
+ */
+ public ProxyException( String message )
+ {
+ super( message );
+ }
+
+ /**
+ * Create an exception wrapping an underlying cause.
+ *
+ * @param message description of the proxy failure
+ * @param t the underlying cause
+ */
+ public ProxyException( String message, Throwable t )
+ {
+ super( message, t );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * An individual request handler for the proxy.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface ProxyRequestHandler
+{
+ /**
+ * The Plexus role of the component.
+ */
+ String ROLE = ProxyRequestHandler.class.getName();
+
+ /**
+ * Used to retrieve an artifact at a particular path, giving the cached version if it exists.
+ *
+ * @param path the expected repository path
+ * @param proxiedRepositories the repositories being proxied to
+ * @param managedRepository the locally managed repository to cache artifacts in
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File get( String path, List proxiedRepositories, ArtifactRepository managedRepository )
+ throws ProxyException, ResourceDoesNotExistException;
+
+ /**
+ * Used to retrieve an artifact at a particular path, giving the cached version if it exists.
+ *
+ * @param path the expected repository path
+ * @param proxiedRepositories the repositories being proxied to
+ * @param managedRepository the locally managed repository to cache artifacts in
+ * @param wagonProxy a network proxy to use when transferring files if needed
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File get( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
+ throws ProxyException, ResourceDoesNotExistException;
+
+ /**
+ * Used to force remote download of the requested path from any of the configured repositories. This method will
+ * only bypass the cache for searching but the requested path will still be cached.
+ *
+ * @param path the expected repository path
+ * @param proxiedRepositories the repositories being proxied to
+ * @param managedRepository the locally managed repository to cache artifacts in
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository )
+ throws ProxyException, ResourceDoesNotExistException;
+
+ /**
+ * Used to force remote download of the requested path from any of the configured repositories. This method will
+ * only bypass the cache for searching but the requested path will still be cached.
+ *
+ * @param path the expected repository path
+ * @param proxiedRepositories the repositories being proxied to
+ * @param managedRepository the locally managed repository to cache artifacts in
+ * @param wagonProxy a network proxy to use when transferring files if needed
+ * @return File object referencing the requested path in the cache
+ * @throws ProxyException when an exception occurred during the retrieval of the requested path
+ * @throws org.apache.maven.wagon.ResourceDoesNotExistException
+ * when the requested object can't be found in any of the
+ * configured repositories
+ */
+ File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
+ throws ProxyException, ResourceDoesNotExistException;
+}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
-import org.apache.maven.repository.digest.DigestUtils;
-import org.apache.maven.repository.digest.DigesterException;
-import org.apache.maven.repository.discovery.ArtifactDiscoverer;
-import org.apache.maven.repository.discovery.DiscovererException;
-import org.apache.maven.wagon.ConnectionException;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authentication.AuthenticationException;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.apache.maven.wagon.observers.ChecksumObserver;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.apache.maven.wagon.repository.Repository;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.FileUtils;
-import org.codehaus.plexus.util.IOUtil;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * An implementation of the proxy handler. This class is not thread safe (the class itself is, but the wagons it uses
- * are not) - it is declared <code>per-lookup</code> for that reason.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component instantiation-strategy="per-lookup"
- * @todo use wagonManager for cache use file:// as URL
- * @todo this currently duplicates a lot of the wagon manager, and doesn't do things like snapshot resolution, etc.
- * The checksum handling is inconsistent with that of the wagon manager.
- * Should we have a more artifact based one? This will merge metadata so should behave correctly, and it is able to
- * correct some limitations of the wagon manager (eg, it can retrieve newer SNAPSHOT files without metadata)
- */
-public class DefaultProxyRequestHandler
- extends AbstractLogEnabled
- implements ProxyRequestHandler
-{
- /**
- * @plexus.requirement role-hint="default"
- * @todo use a map, and have priorities in them
- */
- private ArtifactDiscoverer defaultArtifactDiscoverer;
-
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private ArtifactDiscoverer legacyArtifactDiscoverer;
-
- /**
- * @plexus.requirement role="org.apache.maven.wagon.Wagon"
- */
- private Map/*<String,Wagon>*/ wagons;
-
- public File get( String path, List proxiedRepositories, ArtifactRepository managedRepository )
- throws ProxyException, ResourceDoesNotExistException
- {
- return get( path, proxiedRepositories, managedRepository, null );
- }
-
- public File get( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
- throws ProxyException, ResourceDoesNotExistException
- {
- return get( managedRepository, path, proxiedRepositories, wagonProxy, false );
- }
-
- public File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository )
- throws ProxyException, ResourceDoesNotExistException
- {
- return getAlways( path, proxiedRepositories, managedRepository, null );
- }
-
- public File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository,
- ProxyInfo wagonProxy )
- throws ResourceDoesNotExistException, ProxyException
- {
- return get( managedRepository, path, proxiedRepositories, wagonProxy, true );
- }
-
- private File get( ArtifactRepository managedRepository, String path, List proxiedRepositories, ProxyInfo wagonProxy,
- boolean force )
- throws ProxyException, ResourceDoesNotExistException
- {
- File target = new File( managedRepository.getBasedir(), path );
-
- for ( Iterator i = proxiedRepositories.iterator(); i.hasNext(); )
- {
- ProxiedArtifactRepository repository = (ProxiedArtifactRepository) i.next();
-
- if ( !force && repository.isCachedFailure( path ) )
- {
- processCachedRepositoryFailure( repository, "Cached failure found for: " + path );
- }
- else
- {
- get( path, target, repository, managedRepository, wagonProxy, force );
- }
- }
-
- if ( !target.exists() )
- {
- throw new ResourceDoesNotExistException( "Could not find " + path + " in any of the repositories." );
- }
-
- return target;
- }
-
- private void get( String path, File target, ProxiedArtifactRepository repository,
- ArtifactRepository managedRepository, ProxyInfo wagonProxy, boolean force )
- throws ProxyException
- {
- ArtifactRepositoryPolicy policy;
-
- if ( path.endsWith( ".md5" ) || path.endsWith( ".sha1" ) )
- {
- // always read from the managed repository, no need to make remote request
- }
- else if ( path.endsWith( "maven-metadata.xml" ) )
- {
- File metadataFile = new File( target.getParentFile(), ".metadata-" + repository.getRepository().getId() );
-
- policy = repository.getRepository().getReleases();
-
- // if it is snapshot metadata, use a different policy
- if ( path.endsWith( "-SNAPSHOT/maven-metadata.xml" ) )
- {
- policy = repository.getRepository().getSnapshots();
- }
-
- if ( force || !metadataFile.exists() || isOutOfDate( policy, metadataFile ) )
- {
- getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, metadataFile,
- policy, force );
-
- mergeMetadataFiles( target, metadataFile );
- }
- }
- else
- {
- Artifact artifact = null;
- try
- {
- artifact = defaultArtifactDiscoverer.buildArtifact( path );
- }
- catch ( DiscovererException e )
- {
- getLogger().debug( "Failed to build artifact using default layout with message: " + e.getMessage() );
- }
-
- if ( artifact == null )
- {
- try
- {
- artifact = legacyArtifactDiscoverer.buildArtifact( path );
- }
- catch ( DiscovererException e )
- {
- getLogger().debug( "Failed to build artifact using legacy layout with message: " + e.getMessage() );
- }
- }
-
- if ( artifact != null )
- {
- ArtifactRepository artifactRepository = repository.getRepository();
-
- // we use the release policy for tracking failures, but only check for updates on snapshots
- // also, we don't look for updates on timestamp snapshot files, only non-unique-version ones
- policy = artifact.isSnapshot() ? artifactRepository.getSnapshots() : artifactRepository.getReleases();
-
- boolean needsUpdate = false;
- if ( artifact.getVersion().endsWith( "-SNAPSHOT" ) && isOutOfDate( policy, target ) )
- {
- needsUpdate = true;
- }
-
- if ( needsUpdate || force || !target.exists() )
- {
- getFileFromRepository( artifactRepository.pathOf( artifact ), repository,
- managedRepository.getBasedir(), wagonProxy, target, policy, force );
- }
- }
- else
- {
- // Some other unknown file in the repository, proxy as is
- if ( force || !target.exists() )
- {
- policy = repository.getRepository().getReleases();
- getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, target, policy,
- force );
- }
- }
- }
-
- if ( target.exists() )
- {
- // in case it previously failed and we've since found it
- repository.clearFailure( path );
- }
- }
-
- private void mergeMetadataFiles( File target, File metadataFile )
- throws ProxyException
- {
- MetadataXpp3Reader reader = new MetadataXpp3Reader();
- if ( metadataFile.exists() )
- {
- Metadata metadata = null;
- if ( target.exists() )
- {
- FileReader fileReader = null;
- try
- {
- fileReader = new FileReader( target );
- metadata = reader.read( fileReader );
- }
- catch ( XmlPullParserException e )
- {
- throw new ProxyException( "Unable to parse existing metadata: " + e.getMessage(), e );
- }
- catch ( IOException e )
- {
- throw new ProxyException( "Unable to read existing metadata: " + e.getMessage(), e );
- }
- finally
- {
- IOUtil.close( fileReader );
- }
- }
-
- FileReader fileReader = null;
- boolean changed = false;
- try
- {
- fileReader = new FileReader( metadataFile );
- Metadata newMetadata = reader.read( fileReader );
-
- if ( metadata != null )
- {
- changed = metadata.merge( newMetadata );
- }
- else
- {
- metadata = newMetadata;
- changed = true;
- }
- }
- catch ( IOException e )
- {
- // ignore the merged file
- getLogger().warn( "Unable to read new metadata: " + e.getMessage() );
- }
- catch ( XmlPullParserException e )
- {
- // ignore the merged file
- getLogger().warn( "Unable to parse new metadata: " + e.getMessage() );
- }
- finally
- {
- IOUtil.close( fileReader );
- }
-
- if ( changed )
- {
- FileWriter fileWriter = null;
- try
- {
- fileWriter = new FileWriter( target );
- new MetadataXpp3Writer().write( fileWriter, metadata );
- }
- catch ( IOException e )
- {
- getLogger().warn( "Unable to store new metadata: " + e.getMessage() );
- }
- finally
- {
- IOUtil.close( fileWriter );
- }
- }
- }
- }
-
- private void getFileFromRepository( String path, ProxiedArtifactRepository repository, String repositoryCachePath,
- ProxyInfo httpProxy, File target, ArtifactRepositoryPolicy policy,
- boolean force )
- throws ProxyException
- {
- if ( !policy.isEnabled() )
- {
- getLogger().debug( "Skipping disabled repository " + repository.getName() );
- return;
- }
-
- Map checksums = null;
- Wagon wagon = null;
-
- File temp = new File( target.getAbsolutePath() + ".tmp" );
- temp.deleteOnExit();
-
- boolean connected = false;
- try
- {
- String protocol = repository.getRepository().getProtocol();
- wagon = (Wagon) wagons.get( protocol );
- if ( wagon == null )
- {
- throw new ProxyException( "Unsupported remote protocol: " + protocol );
- }
-
- //@todo configure wagon (ssh settings, etc)
-
- checksums = prepareChecksumListeners( wagon );
-
- connected = connectToRepository( wagon, repository, httpProxy );
- if ( connected )
- {
- int tries = 0;
- boolean success;
-
- do
- {
- tries++;
-
- getLogger().debug( "Trying " + path + " from " + repository.getName() + "..." );
-
- boolean downloaded = true;
- if ( force || !target.exists() )
- {
- wagon.get( path, temp );
- }
- else
- {
- downloaded = wagon.getIfNewer( path, temp, target.lastModified() );
- }
-
- if ( downloaded )
- {
- success = checkChecksum( checksums, path, wagon, repositoryCachePath );
-
- if ( tries > 1 && !success )
- {
- processRepositoryFailure( repository,
- "Checksum failures occurred while downloading " + path, path,
- policy );
- return;
- }
- }
- else
- {
- // getIfNewer determined we were up to date
- success = true;
- }
- }
- while ( !success );
-
- // temp won't exist if we called getIfNewer and it was older, but its still a successful return
- if ( temp.exists() )
- {
- moveTempToTarget( temp, target );
- }
- }
- //try next repository
- }
- catch ( TransferFailedException e )
- {
- processRepositoryFailure( repository, e, path, policy );
- }
- catch ( AuthorizationException e )
- {
- processRepositoryFailure( repository, e, path, policy );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // hard failure setting doesn't affect "not found".
- getLogger().debug( "Artifact not found in repository: " + repository.getName() + ": " + e.getMessage() );
- }
- finally
- {
- temp.delete();
-
- if ( wagon != null && checksums != null )
- {
- releaseChecksumListeners( wagon, checksums );
- }
-
- if ( connected )
- {
- disconnectWagon( wagon );
- }
- }
- }
-
- private static boolean isOutOfDate( ArtifactRepositoryPolicy policy, File target )
- {
- return policy != null && policy.checkOutOfDate( new Date( target.lastModified() ) );
- }
-
- /**
- * Used to add checksum observers as transfer listeners to the wagonManager object
- *
- * @param wagon the wagonManager object to use the checksum with
- * @return map of ChecksumObservers added into the wagonManager transfer listeners
- */
- private Map prepareChecksumListeners( Wagon wagon )
- {
- Map checksums = new LinkedHashMap();
- try
- {
- ChecksumObserver checksum = new ChecksumObserver( "SHA-1" );
- wagon.addTransferListener( checksum );
- checksums.put( "sha1", checksum );
-
- checksum = new ChecksumObserver( "MD5" );
- wagon.addTransferListener( checksum );
- checksums.put( "md5", checksum );
- }
- catch ( NoSuchAlgorithmException e )
- {
- getLogger().error( "An error occurred while preparing checksum observers: " + e.getMessage() );
- }
- return checksums;
- }
-
- private void releaseChecksumListeners( Wagon wagon, Map checksumMap )
- {
- for ( Iterator checksums = checksumMap.values().iterator(); checksums.hasNext(); )
- {
- ChecksumObserver listener = (ChecksumObserver) checksums.next();
- wagon.removeTransferListener( listener );
- }
- }
-
- private boolean connectToRepository( Wagon wagon, ProxiedArtifactRepository repository, ProxyInfo httpProxy )
- {
- boolean connected = false;
- try
- {
- ArtifactRepository artifactRepository = repository.getRepository();
- Repository wagonRepository = new Repository( artifactRepository.getId(), artifactRepository.getUrl() );
- if ( repository.isUseNetworkProxy() && httpProxy != null )
- {
- wagon.connect( wagonRepository, httpProxy );
- }
- else
- {
- wagon.connect( wagonRepository );
- }
- connected = true;
- }
- catch ( ConnectionException e )
- {
- getLogger().info( "Could not connect to " + repository.getName() + ": " + e.getMessage() );
- }
- catch ( AuthenticationException e )
- {
- getLogger().info( "Could not connect to " + repository.getName() + ": " + e.getMessage() );
- }
-
- return connected;
- }
-
- private boolean checkChecksum( Map checksumMap, String path, Wagon wagon, String repositoryCachePath )
- throws ProxyException
- {
- releaseChecksumListeners( wagon, checksumMap );
-
- boolean correctChecksum = false;
-
- boolean allNotFound = true;
-
- for ( Iterator i = checksumMap.keySet().iterator(); i.hasNext() && !correctChecksum; )
- {
- String checksumExt = (String) i.next();
- ChecksumObserver checksum = (ChecksumObserver) checksumMap.get( checksumExt );
- String checksumPath = path + "." + checksumExt;
- File checksumFile = new File( repositoryCachePath, checksumPath );
-
- File tempChecksumFile = new File( checksumFile.getAbsolutePath() + ".tmp" );
- tempChecksumFile.deleteOnExit();
-
- try
- {
- wagon.get( checksumPath, tempChecksumFile );
-
- allNotFound = false;
-
- String remoteChecksum = DigestUtils.cleanChecksum( FileUtils.fileRead( tempChecksumFile ),
- checksumExt.toUpperCase(),
- path.substring( path.lastIndexOf( '/' ) ) );
-
- String actualChecksum = checksum.getActualChecksum().toUpperCase();
- remoteChecksum = remoteChecksum.toUpperCase();
-
- if ( remoteChecksum.equals( actualChecksum ) )
- {
- moveTempToTarget( tempChecksumFile, checksumFile );
-
- correctChecksum = true;
- }
- else
- {
- getLogger().warn(
- "The checksum '" + actualChecksum + "' did not match the remote value: " + remoteChecksum );
- }
- }
- catch ( TransferFailedException e )
- {
- getLogger().warn( "An error occurred during the download of " + checksumPath + ": " + e.getMessage() );
- // do nothing try the next checksum
-
- allNotFound = false;
- }
- catch ( ResourceDoesNotExistException e )
- {
- getLogger().debug( "The checksum did not exist: " + checksumPath + "; " + e.getMessage() );
- // do nothing try the next checksum
- // remove it if it is present locally in case there is an old incorrect one
- if ( checksumFile.exists() )
- {
- checksumFile.delete();
- }
- }
- catch ( AuthorizationException e )
- {
- getLogger().warn( "An error occurred during the download of " + checksumPath + ": " + e.getMessage() );
- // do nothing try the next checksum
-
- allNotFound = false;
- }
- catch ( IOException e )
- {
- getLogger().warn( "An error occurred while reading the temporary checksum file: " + e.getMessage() );
- // do nothing try the next checksum
-
- allNotFound = false;
- }
- catch ( DigesterException e )
- {
- getLogger().warn( "The checksum was invalid: " + checksumPath + ": " + e.getMessage() );
- // do nothing try the next checksum
-
- allNotFound = false;
- }
- finally
- {
- tempChecksumFile.delete();
- }
- }
- return correctChecksum || allNotFound;
- }
-
- /**
- * Used to move the temporary file to its real destination. This is patterned from the way WagonManager handles
- * its downloaded files.
- *
- * @param temp The completed download file
- * @param target The final location of the downloaded file
- * @throws ProxyException when the temp file cannot replace the target file
- */
- private void moveTempToTarget( File temp, File target )
- throws ProxyException
- {
- if ( target.exists() && !target.delete() )
- {
- throw new ProxyException( "Unable to overwrite existing target file: " + target.getAbsolutePath() );
- }
-
- if ( !temp.renameTo( target ) )
- {
- getLogger().warn( "Unable to rename tmp file to its final name... resorting to copy command." );
-
- try
- {
- FileUtils.copyFile( temp, target );
- }
- catch ( IOException e )
- {
- throw new ProxyException( "Cannot copy tmp file to its final location", e );
- }
- finally
- {
- temp.delete();
- }
- }
- }
-
- /**
- * Used to disconnect the wagonManager from its repository
- *
- * @param wagon the connected wagonManager object
- */
- private void disconnectWagon( Wagon wagon )
- {
- try
- {
- wagon.disconnect();
- }
- catch ( ConnectionException e )
- {
- getLogger().error( "Problem disconnecting from wagonManager - ignoring: " + e.getMessage() );
- }
- }
-
- private void processRepositoryFailure( ProxiedArtifactRepository repository, Throwable t, String path,
- ArtifactRepositoryPolicy policy )
- throws ProxyException
- {
- repository.addFailure( path, policy );
-
- String message = t.getMessage();
- if ( repository.isHardFail() )
- {
- repository.addFailure( path, policy );
- throw new ProxyException(
- "An error occurred in hardfailing repository " + repository.getName() + "...\n " + message, t );
- }
-
- getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
- getLogger().debug( "Cause", t );
- }
-
- private void processRepositoryFailure( ProxiedArtifactRepository repository, String message, String path,
- ArtifactRepositoryPolicy policy )
- throws ProxyException
- {
- repository.addFailure( path, policy );
-
- processCachedRepositoryFailure( repository, message );
- }
-
- private void processCachedRepositoryFailure( ProxiedArtifactRepository repository, String message )
- throws ProxyException
- {
- if ( repository.isHardFail() )
- {
- throw new ProxyException(
- "An error occurred in hardfailing repository " + repository.getName() + "...\n " + message );
- }
-
- getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
-
-import java.util.Calendar;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * A proxied artifact repository - contains the artifact repository and additional information about
- * the proxied repository.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class ProxiedArtifactRepository
-{
- /**
- * Whether to cache failures or not.
- */
- private boolean cacheFailures;
-
- /**
- * Whether failures on this repository cause the whole group to fail.
- */
- private boolean hardFail;
-
- /**
- * Whether to use the network proxy for any requests.
- */
- private boolean useNetworkProxy;
-
- /**
- * The artifact repository on the other end of the proxy.
- */
- private final ArtifactRepository repository;
-
- /**
- * Cache of failures that have already occurred, containing paths from the repository root. The value given
- * specifies when the failure should expire.
- */
- private Map/*<String,Long>*/ failureCache = new HashMap/*<String,Long>*/();
-
- /**
- * A user friendly name for the repository.
- */
- private String name;
-
- public ProxiedArtifactRepository( ArtifactRepository repository )
- {
- this.repository = repository;
- }
-
- public boolean isHardFail()
- {
- return hardFail;
- }
-
- public boolean isUseNetworkProxy()
- {
- return useNetworkProxy;
- }
-
- public boolean isCacheFailures()
- {
- return cacheFailures;
- }
-
- public ArtifactRepository getRepository()
- {
- return repository;
- }
-
- /**
- * Check if there is a previously cached failure for requesting the given path.
- *
- * @param path the path
- * @return whether there is a failure
- */
- public boolean isCachedFailure( String path )
- {
- boolean failed = false;
- if ( cacheFailures )
- {
- Long time = (Long) failureCache.get( path );
- if ( time != null )
- {
- if ( System.currentTimeMillis() < time.longValue() )
- {
- failed = true;
- }
- else
- {
- clearFailure( path );
- }
- }
- }
- return failed;
- }
-
- /**
- * Add a failure to the cache.
- *
- * @param path the path that failed
- * @param policy the policy for when the failure should expire
- */
- public void addFailure( String path, ArtifactRepositoryPolicy policy )
- {
- failureCache.put( path, new Long( calculateExpiryTime( policy ) ) );
- }
-
- private long calculateExpiryTime( ArtifactRepositoryPolicy policy )
- {
- String updatePolicy = policy.getUpdatePolicy();
- long time;
- if ( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS.equals( updatePolicy ) )
- {
- time = 0;
- }
- else if ( ArtifactRepositoryPolicy.UPDATE_POLICY_DAILY.equals( updatePolicy ) )
- {
- // Get midnight boundary
- Calendar cal = Calendar.getInstance();
- cal.set( Calendar.HOUR_OF_DAY, 0 );
- cal.set( Calendar.MINUTE, 0 );
- cal.set( Calendar.SECOND, 0 );
- cal.set( Calendar.MILLISECOND, 0 );
- cal.add( Calendar.DAY_OF_MONTH, 1 );
- time = cal.getTime().getTime();
- }
- else if ( updatePolicy.startsWith( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL ) )
- {
- String s = updatePolicy.substring( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL.length() + 1 );
- int minutes = Integer.valueOf( s ).intValue();
- Calendar cal = Calendar.getInstance();
- cal.add( Calendar.MINUTE, minutes );
- time = cal.getTime().getTime();
- }
- else
- {
- // else assume "never"
- time = Long.MAX_VALUE;
- }
- return time;
- }
-
- /**
- * Remove a failure.
- *
- * @param path the path that had previously failed
- */
- public void clearFailure( String path )
- {
- failureCache.remove( path );
- }
-
- public String getName()
- {
- return name;
- }
-
- public void setCacheFailures( boolean cacheFailures )
- {
- this.cacheFailures = cacheFailures;
- }
-
- public void setHardFail( boolean hardFail )
- {
- this.hardFail = hardFail;
- }
-
- public void setUseNetworkProxy( boolean useNetworkProxy )
- {
- this.useNetworkProxy = useNetworkProxy;
- }
-
- public void setName( String name )
- {
- this.name = name;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class ProxyException
- extends Exception
-{
- public ProxyException( String message )
- {
- super( message );
- }
-
- public ProxyException( String message, Throwable t )
- {
- super( message, t );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * An individual request handler for the proxy.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface ProxyRequestHandler
-{
- /**
- * The Plexus role of the component.
- */
- String ROLE = ProxyRequestHandler.class.getName();
-
- /**
- * Used to retrieve an artifact at a particular path, giving the cached version if it exists.
- *
- * @param path the expected repository path
- * @param proxiedRepositories the repositories being proxied to
- * @param managedRepository the locally managed repository to cache artifacts in
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException
- * when the requested object can't be found in any of the
- * configured repositories
- */
- File get( String path, List proxiedRepositories, ArtifactRepository managedRepository )
- throws ProxyException, ResourceDoesNotExistException;
-
- /**
- * Used to retrieve an artifact at a particular path, giving the cached version if it exists.
- *
- * @param path the expected repository path
- * @param proxiedRepositories the repositories being proxied to
- * @param managedRepository the locally managed repository to cache artifacts in
- * @param wagonProxy a network proxy to use when transferring files if needed
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException
- * when the requested object can't be found in any of the
- * configured repositories
- */
- File get( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
- throws ProxyException, ResourceDoesNotExistException;
-
- /**
- * Used to force remote download of the requested path from any the configured repositories. This method will
- * only bypass the cache for searching but the requested path will still be cached.
- *
- * @param path the expected repository path
- * @param proxiedRepositories the repositories being proxied to
- * @param managedRepository the locally managed repository to cache artifacts in
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException
- * when the requested object can't be found in any of the
- * configured repositories
- */
- File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository )
- throws ProxyException, ResourceDoesNotExistException;
-
- /**
- * Used to force remote download of the requested path from any the configured repositories. This method will
- * only bypass the cache for searching but the requested path will still be cached.
- *
- * @param path the expected repository path
- * @param proxiedRepositories the repositories being proxied to
- * @param managedRepository the locally managed repository to cache artifacts in
- * @param wagonProxy a network proxy to use when transferring files if needed
- * @return File object referencing the requested path in the cache
- * @throws ProxyException when an exception occurred during the retrieval of the requested path
- * @throws org.apache.maven.wagon.ResourceDoesNotExistException
- * when the requested object can't be found in any of the
- * configured repositories
- */
- File getAlways( String path, List proxiedRepositories, ArtifactRepository managedRepository, ProxyInfo wagonProxy )
- throws ProxyException, ResourceDoesNotExistException;
-}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.codehaus.plexus.PlexusTestCase;
+import org.codehaus.plexus.util.FileUtils;
+import org.easymock.MockControl;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.net.MalformedURLException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Test the proxy handler.
+ *
+ * @author Brett Porter
+ */
+public class ProxyRequestHandlerTest
+ extends PlexusTestCase
+{
+ private ProxyRequestHandler requestHandler;
+
+ private List proxiedRepositories;
+
+ private List legacyProxiedRepositories;
+
+ private ArtifactRepository defaultManagedRepository;
+
+ private ArtifactRepository legacyManagedRepository;
+
+ private ArtifactRepository proxiedRepository1;
+
+ private ArtifactRepository proxiedRepository2;
+
+ private ArtifactRepository legacyProxiedRepository;
+
+ private ArtifactRepositoryLayout defaultLayout;
+
+ private ArtifactRepositoryFactory factory;
+
+ private MockControl wagonMockControl;
+
+ private Wagon wagonMock;
+
+ private static final ArtifactRepositoryPolicy DEFAULT_POLICY =
+ new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER, null );
+
+ private static final ArtifactRepositoryPolicy ALWAYS_UPDATE_POLICY =
+ new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS, null );
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ requestHandler = (ProxyRequestHandler) lookup( ProxyRequestHandler.ROLE );
+
+ factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+
+ File repoLocation = getTestFile( "target/test-repository/managed" );
+ // faster only to delete this one before copying, the others are done case by case
+ FileUtils.deleteDirectory( new File( repoLocation, "org/apache/maven/test/get-merged-metadata" ) );
+ copyDirectoryStructure( getTestFile( "src/test/repositories/managed" ), repoLocation );
+
+ defaultLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ defaultManagedRepository = createRepository( "managed-repository", repoLocation );
+
+ repoLocation = getTestFile( "target/test-repository/legacy-managed" );
+ FileUtils.deleteDirectory( repoLocation );
+ copyDirectoryStructure( getTestFile( "src/test/repositories/legacy-managed" ), repoLocation );
+
+ ArtifactRepositoryLayout legacyLayout =
+ (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
+
+ legacyManagedRepository = createRepository( "managed-repository", repoLocation );
+
+ File location = getTestFile( "src/test/repositories/proxied1" );
+ proxiedRepository1 = createRepository( "proxied1", location );
+
+ location = getTestFile( "src/test/repositories/proxied2" );
+ proxiedRepository2 = createRepository( "proxied2", location );
+
+ proxiedRepositories = new ArrayList( 2 );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+ location = getTestFile( "src/test/repositories/legacy-proxied" );
+ legacyProxiedRepository = createRepository( "legacy-proxied", location, legacyLayout );
+
+ legacyProxiedRepositories = Collections.singletonList( createProxiedRepository( legacyProxiedRepository ) );
+
+ wagonMockControl = MockControl.createNiceControl( Wagon.class );
+ wagonMock = (Wagon) wagonMockControl.getMock();
+ WagonDelegate delegate = (WagonDelegate) lookup( Wagon.ROLE, "test" );
+ delegate.setDelegate( wagonMock );
+ }
+
+ public void testGetDefaultLayoutNotPresent()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( proxiedFile );
+ assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+ // TODO: timestamp preservation requires support for that in wagon
+// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
+ }
+
+ public void testGetDefaultLayoutAlreadyPresent()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( expectedFile );
+ long originalModificationTime = expectedFile.lastModified();
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.fileRead( proxiedFile );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
+ assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
+ file.lastModified() );
+ }
+
+ public void testGetDefaultLayoutRemoteUpdate()
+ throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
+ {
+ String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( expectedFile );
+
+ assertTrue( expectedFile.exists() );
+
+ expectedFile.setLastModified( getPastDate().getTime() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.fileRead( proxiedFile );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
+ public void testGetWhenInBothProxiedRepos()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( proxiedFile );
+ assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+
+ proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String unexpectedContents = FileUtils.fileRead( proxiedFile );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
+ public void testGetInSecondProxiedRepo()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ expectedFile.delete();
+ assertFalse( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( proxiedFile );
+ assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+ }
+
+    public void testNotFoundInAnyProxies()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // A path present in no repository must surface as ResourceDoesNotExistException
+        // and must not leave a file behind in the managed repository.
+        String artifactPath = "org/apache/maven/test/does-not-exist/1.0/does-not-exist-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        assertFalse( managedFile.exists() );
+
+        try
+        {
+            File result = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+            fail( "File returned was: " + result + "; should have got a not found exception" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // expected, but check file was not created
+            assertFalse( managedFile.exists() );
+        }
+    }
+
+    public void testGetInSecondProxiedRepoFirstFails()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // Proxy 1 throws a (soft) transfer failure: the handler should fall through to
+        // proxy 2, serve the artifact from there, and record the failure against proxy 1.
+        String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
+        expectedFile.delete();
+        assertFalse( expectedFile.exists() );
+
+        // Replace proxy 1 with a wagon-mock-backed repository (test:// protocol).
+        proxiedRepository1 = createRepository( "proxied1", "test://..." );
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+        proxiedRepositories.add( proxiedArtifactRepository );
+        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+        // Expect one get() of the temporary download file, and make it fail.
+        wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+        wagonMockControl.replay();
+
+        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+        wagonMockControl.verify();
+
+        assertEquals( "Check file matches", expectedFile, file );
+        assertTrue( "Check file created", file.exists() );
+        File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+        String expectedContents = FileUtils.fileRead( proxiedFile );
+        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+
+        // The soft failure should have been cached against proxy 1.
+        assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+    }
+
+    public void testGetButAllRepositoriesFail()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // Both proxied repositories soft-fail their transfers: the handler currently
+        // reports this as "resource does not exist" and caches both failures.
+        String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
+
+        expectedFile.delete();
+        assertFalse( expectedFile.exists() );
+
+        // Back both proxies with the wagon mock (test:// protocol).
+        proxiedRepository1 = createRepository( "proxied1", "test://..." );
+        proxiedRepository2 = createRepository( "proxied2", "test://..." );
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
+        proxiedRepositories.add( proxiedArtifactRepository1 );
+        ProxiedArtifactRepository proxiedArtifactRepository2 = createProxiedRepository( proxiedRepository2 );
+        proxiedRepositories.add( proxiedArtifactRepository2 );
+
+        // Expect one failing get() attempt per proxy, in order.
+        wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+        wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+        wagonMockControl.replay();
+
+        try
+        {
+            File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + file + "; but was expecting a failure" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // as expected
+            wagonMockControl.verify();
+            assertTrue( "Check failure", proxiedArtifactRepository1.isCachedFailure( path ) );
+            assertTrue( "Check failure", proxiedArtifactRepository2.isCachedFailure( path ) );
+
+            // TODO: do we really want failures to present as a not found?
+            // TODO: How much information on each failure should we pass back to the user vs. logging in the proxy?
+        }
+    }
+
+    public void testGetInSecondProxiedRepoFirstHardFails()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // Proxy 1 is configured hard-fail: a transfer failure there must abort the whole
+        // get() with a ProxyException instead of falling through to proxy 2.
+        String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
+
+        expectedFile.delete();
+        assertFalse( expectedFile.exists() );
+
+        proxiedRepository1 = createRepository( "proxied1", "test://..." );
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository proxiedArtifactRepository = createHardFailProxiedRepository( proxiedRepository1 );
+        proxiedRepositories.add( proxiedArtifactRepository );
+        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+        // Keep a reference to the thrown exception so we can assert it is the cause.
+        wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+        TransferFailedException failedException = new TransferFailedException( "transfer failed" );
+        wagonMockControl.setThrowable( failedException );
+
+        wagonMockControl.replay();
+
+        try
+        {
+            File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + file + "; but was expecting a failure" );
+        }
+        catch ( ProxyException e )
+        {
+            // expect a failure
+            wagonMockControl.verify();
+
+            assertEquals( "Check cause", failedException, e.getCause() );
+            assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+        }
+    }
+
+    public void testGetInSecondProxiedRepoFirstFailsFromCache()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // fail from the cache, even though it is in the first repo now
+        String artifactPath = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        managedFile.delete();
+        assertFalse( managedFile.exists() );
+
+        // Pre-load a cached soft failure on proxy 1 so the handler skips it.
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository failingProxy = createProxiedRepository( proxiedRepository1 );
+        failingProxy.addFailure( artifactPath, DEFAULT_POLICY );
+        proxiedRepositories.add( failingProxy );
+        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+        File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        // Content must have come from proxy 2 ...
+        String fromProxy2 = FileUtils.fileRead( new File( proxiedRepository2.getBasedir(), artifactPath ) );
+        assertEquals( "Check file contents", fromProxy2, FileUtils.fileRead( fetched ) );
+
+        // ... and must differ from the copy in the skipped proxy 1.
+        String fromProxy1 = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), artifactPath ) );
+        assertFalse( "Check file contents", fromProxy1.equals( FileUtils.fileRead( fetched ) ) );
+    }
+
+    public void testGetInSecondProxiedRepoFirstHardFailsFromCache()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // fail from the cache, even though it is in the first repo now
+        String artifactPath = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        managedFile.delete();
+        assertFalse( managedFile.exists() );
+
+        // A hard-fail proxy with a pre-cached failure must abort the whole request.
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository hardFailProxy = createHardFailProxiedRepository( proxiedRepository1 );
+        hardFailProxy.addFailure( artifactPath, DEFAULT_POLICY );
+        proxiedRepositories.add( hardFailProxy );
+        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+        try
+        {
+            File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + fetched + "; but was expecting a failure" );
+        }
+        catch ( ProxyException e )
+        {
+            // expect a failure
+            assertTrue( "Check failure", hardFailProxy.isCachedFailure( artifactPath ) );
+        }
+    }
+
+    public void testGetInSecondProxiedRepoFirstFailsDisabledCacheFailure()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // With failure caching disabled on proxy 1, a soft transfer failure must still
+        // fall through to proxy 2 but leave no cached failure behind.
+        String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
+
+        // Delete first so the precondition cannot fail on state left by a sibling test
+        // (the other tests of this artifact delete before asserting, too).
+        expectedFile.delete();
+        assertFalse( expectedFile.exists() );
+
+        proxiedRepository1 = createRepository( "proxied1", "test://..." );
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+        proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
+        proxiedArtifactRepository.setCacheFailures( false );
+        proxiedRepositories.add( proxiedArtifactRepository );
+        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+        // Expect one failing get() of the temporary download file on proxy 1.
+        wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+        wagonMockControl.replay();
+
+        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+        wagonMockControl.verify();
+
+        assertEquals( "Check file matches", expectedFile, file );
+        assertTrue( "Check file created", file.exists() );
+        File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+        String expectedContents = FileUtils.fileRead( proxiedFile );
+        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+
+        // Caching is disabled, so the failure must NOT have been recorded.
+        assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+    }
+
+    public void testGetWhenInBothProxiedReposFirstHasExpiredCacheFailure()
+        throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
+    {
+        // The cached failure on proxy 1 uses an always-update policy, so it is treated as
+        // expired: the artifact should be fetched from proxy 1 and the failure cleared.
+        String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+        // Delete first so the precondition cannot fail on state left by a sibling test
+        // (the other tests of this artifact delete before asserting, too).
+        expectedFile.delete();
+        assertFalse( expectedFile.exists() );
+
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
+        proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
+        proxiedRepositories.add( proxiedArtifactRepository );
+        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", expectedFile, file );
+        assertTrue( "Check file created", file.exists() );
+
+        // Content must have come from proxy 1 (failure expired) ...
+        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+        String expectedContents = FileUtils.fileRead( proxiedFile );
+        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+
+        // ... not from proxy 2.
+        proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+        String unexpectedContents = FileUtils.fileRead( proxiedFile );
+        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+
+        // The expired failure entry should have been removed.
+        assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
+    }
+
+    public void testGetAlwaysAlreadyPresent()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // getAlways() must refresh from the proxy even though a local copy already exists.
+        String artifactPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+        String staleContents = FileUtils.fileRead( managedFile );
+
+        assertTrue( managedFile.exists() );
+
+        File fetched = requestHandler.getAlways( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        // The refreshed copy must match proxy 1 and differ from the old local contents.
+        String freshContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), artifactPath ) );
+        assertEquals( "Check file contents", freshContents, FileUtils.fileRead( fetched ) );
+        assertFalse( "Check file contents", staleContents.equals( FileUtils.fileRead( fetched ) ) );
+    }
+
+    public void testGetAlwaysAlreadyPresentRemovedFromProxies()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // The artifact exists locally but has been removed from every proxy:
+        // getAlways() should keep serving the local copy unchanged.
+        String artifactPath = "org/apache/maven/test/get-removed-from-proxies/1.0/get-removed-from-proxies-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+        String localContents = FileUtils.fileRead( managedFile );
+
+        assertTrue( managedFile.exists() );
+
+        File fetched = requestHandler.getAlways( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+        assertEquals( "Check file contents", localContents, FileUtils.fileRead( fetched ) );
+
+        // TODO: is this the correct behaviour, or should it be considered removed too?
+    }
+
+    public void testGetAlwaysWithCachedFailure()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // getAlways() ignores the cached failure on proxy 1 and refreshes from it anyway.
+        String artifactPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+        String staleContents = FileUtils.fileRead( managedFile );
+
+        assertTrue( managedFile.exists() );
+
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository failingProxy = createProxiedRepository( proxiedRepository1 );
+        failingProxy.addFailure( artifactPath, DEFAULT_POLICY );
+        proxiedRepositories.add( failingProxy );
+        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+        File fetched = requestHandler.getAlways( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        // Refreshed from proxy 1 despite the cached failure; old local contents replaced.
+        String freshContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), artifactPath ) );
+        assertEquals( "Check file contents", freshContents, FileUtils.fileRead( fetched ) );
+        assertFalse( "Check file contents", staleContents.equals( FileUtils.fileRead( fetched ) ) );
+    }
+
+    public void testGetRemovesTemporaryFileOnSuccess()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // After a successful download the in-progress ".tmp" file must be cleaned up.
+        String artifactPath = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        managedFile.delete();
+        assertFalse( managedFile.exists() );
+
+        File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        File leftoverTemp = new File( fetched.getParentFile(), fetched.getName() + ".tmp" );
+        assertFalse( "Check temporary file removed", leftoverTemp.exists() );
+    }
+
+    public void testGetRemovesTemporaryFileOnError()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // A failed transfer must not leave the in-progress ".tmp" download file behind.
+        String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+        expectedFile.delete();
+        assertFalse( expectedFile.exists() );
+
+        // Single wagon-mock-backed proxy whose transfer fails.
+        proxiedRepository1 = createRepository( "proxied1", "test://..." );
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
+        proxiedRepositories.add( proxiedArtifactRepository1 );
+
+        wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
+
+        wagonMockControl.replay();
+
+        try
+        {
+            File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + file + "; but was expecting a failure" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // as expected
+            wagonMockControl.verify();
+
+            // The temporary download file must have been cleaned up on failure.
+            File tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" );
+            assertFalse( "Check temporary file removed", tempFile.exists() );
+        }
+    }
+
+    public void testGetRemovesTemporaryChecksumFileOnSuccess()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // After a successful download the temporary ".sha1.tmp" checksum file must be gone.
+        String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+        // Clean the whole artifact directory first (artifact + checksum files) so the
+        // precondition cannot fail on state left by a sibling test; matches the cleanup
+        // done by the other checksum tests.
+        FileUtils.deleteDirectory( expectedFile.getParentFile() );
+        assertFalse( expectedFile.exists() );
+
+        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", expectedFile, file );
+        assertTrue( "Check file created", file.exists() );
+        File tempFile = new File( file.getParentFile(), file.getName() + ".sha1.tmp" );
+        assertFalse( "Check temporary file removed", tempFile.exists() );
+    }
+
+    public void testGetRemovesTemporaryChecksumFileOnError()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // A failed checksum transfer must clean up both the artifact and checksum temp files.
+        String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+        FileUtils.deleteDirectory( expectedFile.getParentFile() );
+        assertFalse( expectedFile.exists() );
+
+        // Single wagon-mock-backed proxy; the checksum downloads are made to fail.
+        proxiedRepository1 = createRepository( "proxied1", "test://..." );
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
+        proxiedRepositories.add( proxiedArtifactRepository1 );
+
+        wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+
+        // Record failing expectations for the checksum retrievals on the mock.
+        mockFailedChecksums( path, expectedFile );
+
+        wagonMockControl.replay();
+
+        try
+        {
+            File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + file + "; but was expecting a failure" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // as expected
+            wagonMockControl.verify();
+
+            // Neither the artifact temp file nor the checksum temp file may remain.
+            File tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" );
+            assertFalse( "Check temporary file removed", tempFile.exists() );
+
+            tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" );
+            assertFalse( "Check temporary file removed", tempFile.exists() );
+        }
+    }
+
+    public void testGetChecksumBothCorrect()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Both remote checksums verify; only the SHA-1 is kept locally.
+        String artifactPath = "org/apache/maven/test/get-checksum-both-right/1.0/get-checksum-both-right-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        File sha1File = getChecksumFile( fetched, "sha1" );
+        assertTrue( "Check file created", sha1File.exists() );
+        assertEquals( "Check checksum", "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar",
+                      FileUtils.fileRead( sha1File ).trim() );
+
+        assertFalse( "Check file not created", getChecksumFile( fetched, "md5" ).exists() );
+    }
+
+    public void testGetCorrectSha1NoMd5()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Remote has only a (valid) SHA-1; it is stored and no MD5 file is created.
+        String artifactPath = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        File sha1File = getChecksumFile( fetched, "sha1" );
+        assertTrue( "Check file created", sha1File.exists() );
+        assertEquals( "Check checksum", "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
+                      FileUtils.fileRead( sha1File ).trim() );
+
+        assertFalse( "Check file not created", getChecksumFile( fetched, "md5" ).exists() );
+    }
+
+    public void testGetCorrectSha1BadMd5()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // SHA-1 verifies but MD5 does not; the good SHA-1 is kept, the bad MD5 discarded.
+        String artifactPath = "org/apache/maven/test/get-checksum-sha1-bad-md5/1.0/get-checksum-sha1-bad-md5-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        File sha1File = getChecksumFile( fetched, "sha1" );
+        assertTrue( "Check file created", sha1File.exists() );
+        assertEquals( "Check checksum", "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
+                      FileUtils.fileRead( sha1File ).trim() );
+
+        assertFalse( "Check file not created", getChecksumFile( fetched, "md5" ).exists() );
+    }
+
+    public void testGetCorrectMd5NoSha1()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Remote has only a (valid) MD5; it is stored and no SHA-1 file is created.
+        String artifactPath = "org/apache/maven/test/get-checksum-md5-only/1.0/get-checksum-md5-only-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        File md5File = getChecksumFile( fetched, "md5" );
+        assertTrue( "Check file created", md5File.exists() );
+        assertEquals( "Check checksum", "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar",
+                      FileUtils.fileRead( md5File ).trim() );
+
+        assertFalse( "Check file not created", getChecksumFile( fetched, "sha1" ).exists() );
+    }
+
+    public void testGetCorrectMd5BadSha1()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // MD5 verifies but SHA-1 does not; the good MD5 is kept, the bad SHA-1 discarded.
+        String artifactPath = "org/apache/maven/test/get-checksum-md5-bad-sha1/1.0/get-checksum-md5-bad-sha1-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        File md5File = getChecksumFile( fetched, "md5" );
+        assertTrue( "Check file created", md5File.exists() );
+        assertEquals( "Check checksum", "8a02aa67549d27b2a03cd4547439c6d3 *get-checksum-md5-bad-sha1-1.0.jar",
+                      FileUtils.fileRead( md5File ).trim() );
+
+        assertFalse( "Check file not created", getChecksumFile( fetched, "sha1" ).exists() );
+    }
+
+    public void testGetWithNoChecksums()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Remote publishes no checksums at all: the artifact is fetched and no local
+        // checksum files are fabricated.
+        String artifactPath = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        managedFile.delete();
+        assertFalse( managedFile.exists() );
+
+        File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        assertFalse( "Check file not created", getChecksumFile( fetched, "md5" ).exists() );
+        assertFalse( "Check file not created", getChecksumFile( fetched, "sha1" ).exists() );
+    }
+
+    public void testGetBadMd5BadSha1()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Both remote checksums are wrong: the get must fail and leave nothing behind.
+        String artifactPath = "org/apache/maven/test/get-checksum-both-bad/1.0/get-checksum-both-bad-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        try
+        {
+            File fetched = requestHandler.get( artifactPath, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + fetched + "; but was expecting a failure" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // expect a failure
+            assertFalse( "Check file not created", managedFile.exists() );
+
+            assertFalse( "Check file not created", getChecksumFile( managedFile, "md5" ).exists() );
+            assertFalse( "Check file not created", getChecksumFile( managedFile, "sha1" ).exists() );
+        }
+    }
+
+    public void testGetChecksumTransferFailed()
+        throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
+        AuthorizationException
+    {
+        // Checksum downloads fail after a successful artifact transfer: the whole get
+        // must fail and neither the artifact nor any checksum file may be left behind.
+        String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
+        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+        FileUtils.deleteDirectory( expectedFile.getParentFile() );
+        assertFalse( expectedFile.exists() );
+
+        // Single wagon-mock-backed proxy.
+        proxiedRepository1 = createRepository( "proxied1", "test://..." );
+        proxiedRepositories.clear();
+        ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
+        proxiedRepositories.add( proxiedArtifactRepository1 );
+
+        wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
+
+        // Record failing expectations for the checksum retrievals on the mock.
+        mockFailedChecksums( path, expectedFile );
+
+        wagonMockControl.replay();
+
+        try
+        {
+            File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + file + "; but was expecting a failure" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // as expected
+            wagonMockControl.verify();
+
+            assertFalse( "Check file not created", expectedFile.exists() );
+
+            assertFalse( "Check file not created", getChecksumFile( expectedFile, "md5" ).exists() );
+            assertFalse( "Check file not created", getChecksumFile( expectedFile, "sha1" ).exists() );
+        }
+    }
+
+    public void testGetAlwaysBadChecksumPresentLocallyAbsentRemote()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Local copy has a bad checksum and the remote publishes none: getAlways() must
+        // replace the artifact from the proxy and drop the stale local checksum files.
+        String artifactPath = "org/apache/maven/test/get-bad-local-checksum/1.0/get-bad-local-checksum-1.0.jar";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), artifactPath );
+        String staleContents = FileUtils.fileRead( managedFile );
+
+        assertTrue( managedFile.exists() );
+
+        File fetched = requestHandler.getAlways( artifactPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        String freshContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), artifactPath ) );
+        assertEquals( "Check file contents", freshContents, FileUtils.fileRead( fetched ) );
+        assertFalse( "Check file contents", staleContents.equals( FileUtils.fileRead( fetched ) ) );
+
+        assertFalse( "Check checksum removed", new File( fetched.getParentFile(), fetched.getName() + ".sha1" ).exists() );
+        assertFalse( "Check checksum removed", new File( fetched.getParentFile(), fetched.getName() + ".md5" ).exists() );
+    }
+
+    public void testGetChecksumPresentInManagedRepo()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // A checksum file already present locally is served as-is; the proxy copy is ignored.
+        String checksumPath =
+            "org/apache/maven/test/get-checksum-from-managed-repo/1.0/get-checksum-from-managed-repo-1.0.jar.sha1";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), checksumPath );
+        String localContents = FileUtils.fileRead( managedFile );
+
+        assertTrue( managedFile.exists() );
+
+        File fetched = requestHandler.get( checksumPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        String proxyContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), checksumPath ) );
+        assertEquals( "Check file contents", localContents, FileUtils.fileRead( fetched ) );
+        assertFalse( "Check file contents", proxyContents.equals( FileUtils.fileRead( fetched ) ) );
+    }
+
+    public void testGetAlwaysChecksumPresentInManagedRepo()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Even getAlways() keeps the locally managed checksum rather than refreshing it.
+        String checksumPath =
+            "org/apache/maven/test/get-checksum-from-managed-repo/1.0/get-checksum-from-managed-repo-1.0.jar.sha1";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), checksumPath );
+        String localContents = FileUtils.fileRead( managedFile );
+
+        assertTrue( managedFile.exists() );
+
+        File fetched = requestHandler.getAlways( checksumPath, proxiedRepositories, defaultManagedRepository );
+
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        String proxyContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), checksumPath ) );
+        assertEquals( "Check file contents", localContents, FileUtils.fileRead( fetched ) );
+        assertFalse( "Check file contents", proxyContents.equals( FileUtils.fileRead( fetched ) ) );
+    }
+
+    public void testGetChecksumNotPresentInManagedRepo()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Checksum files are never fetched directly from proxies: absent locally means
+        // the request must fail with "not found".
+        String checksumPath = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar.sha1";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), checksumPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        try
+        {
+            File fetched = requestHandler.get( checksumPath, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + fetched + "; but was expecting a failure" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // expected
+
+            assertFalse( managedFile.exists() );
+        }
+    }
+
+    public void testGetAlwaysChecksumNotPresentInManagedRepo()
+        throws ResourceDoesNotExistException, ProxyException, IOException
+    {
+        // Same as the plain get() case: getAlways() must also report a missing local
+        // checksum as "not found" rather than fetching it from a proxy.
+        String checksumPath = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar.sha1";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), checksumPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        try
+        {
+            File fetched = requestHandler.getAlways( checksumPath, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + fetched + "; but was expecting a failure" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // expected
+
+            assertFalse( managedFile.exists() );
+        }
+    }
+
+    public void testGetMetadataNotPresent()
+        throws ProxyException, IOException
+    {
+        // Metadata absent everywhere must surface as "not found" without creating a file.
+        String metadataPath = "org/apache/maven/test/dummy-artifact/1.0/maven-metadata.xml";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), metadataPath );
+
+        assertFalse( managedFile.exists() );
+
+        try
+        {
+            File fetched = requestHandler.get( metadataPath, proxiedRepositories, defaultManagedRepository );
+            fail( "Found file: " + fetched + "; but was expecting a failure" );
+        }
+        catch ( ResourceDoesNotExistException e )
+        {
+            // expected
+
+            assertFalse( managedFile.exists() );
+        }
+    }
+
+    public void testGetMetadataProxied()
+        throws ProxyException, ResourceDoesNotExistException, IOException
+    {
+        // Metadata absent locally is fetched from the proxy and rewritten in canonical form.
+        String metadataPath = "org/apache/maven/test/get-default-metadata/1.0/maven-metadata.xml";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), metadataPath );
+
+        FileUtils.deleteDirectory( managedFile.getParentFile() );
+        assertFalse( managedFile.exists() );
+
+        File fetched = requestHandler.get( metadataPath, proxiedRepositories, defaultManagedRepository );
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        assertEquals( "Check content matches", getExpectedMetadata( "get-default-metadata", "1.0" ),
+                      FileUtils.fileRead( fetched ) );
+    }
+
+    public void testGetMetadataMergeRepos()
+        throws IOException, ResourceDoesNotExistException, ProxyException
+    {
+        // Version lists from the managed repository and both proxies are merged into one
+        // metadata file.
+        String metadataPath = "org/apache/maven/test/get-merged-metadata/maven-metadata.xml";
+        File managedFile = new File( defaultManagedRepository.getBasedir(), metadataPath );
+
+        assertTrue( managedFile.exists() );
+
+        File fetched = requestHandler.get( metadataPath, proxiedRepositories, defaultManagedRepository );
+        assertEquals( "Check file matches", managedFile, fetched );
+        assertTrue( "Check file created", fetched.exists() );
+
+        // Expected merge result: union of the versions contributed by each repository.
+        String mergedContents = getExpectedMetadata( "get-merged-metadata", getVersioning(
+            Arrays.asList( new String[]{"0.9", "1.0", "2.0", "3.0", "5.0", "4.0"} ) ) );
+
+        assertEquals( "Check content matches", mergedContents, FileUtils.fileRead( fetched ) );
+    }
+
+ public void testGetMetadataRemovedFromProxies()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-removed-metadata/1.0/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
+ }
+
+ public void testGetReleaseMetadataNotExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
+
+ String unexpectedContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), path ) );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
+ public void testGetSnapshotMetadataNotExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
+
+ String unexpectedContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), path ) );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
+ public void testGetReleaseMetadataExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents =
+ getExpectedMetadata( "get-updated-metadata", getVersioning( Arrays.asList( new String[]{"1.0", "2.0"} ) ) );
+
+ assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
+ public void testGetSnapshotMetadataExpired()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
+ proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents =
+ getExpectedMetadata( "get-updated-metadata", "1.0-SNAPSHOT", getVersioning( "20050831.111213", 2 ) );
+
+ assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
+ public void testGetMetadataNotUpdated()
+ throws ResourceDoesNotExistException, ProxyException, IOException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
+
+ assertTrue( expectedFile.exists() );
+
+ File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( proxiedFile.lastModified() );
+
+ proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+ assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
+
+ String unexpectedContents = FileUtils.fileRead( proxiedFile );
+ assertFalse( "Check content doesn't match proxy version",
+ unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
+ public void testGetMetadataUpdated()
+ throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
+
+ assertTrue( expectedFile.exists() );
+
+ new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents =
+ getExpectedMetadata( "get-updated-metadata", getVersioning( Arrays.asList( new String[]{"1.0", "2.0"} ) ) );
+ assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
+ assertFalse( "Check content doesn't match old version",
+ unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
+ public void testGetAlwaysMetadata()
+ throws IOException, ResourceDoesNotExistException, ProxyException
+ {
+ String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+ String unexpectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
+
+ assertTrue( expectedFile.exists() );
+
+ File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ String expectedContents =
+ getExpectedMetadata( "get-updated-metadata", getVersioning( Arrays.asList( new String[]{"1.0", "2.0"} ) ) );
+
+ assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
+ assertFalse( "Check content doesn't match old version",
+ unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+
    /**
     * A snapshot artifact that exists nowhere must produce {@link ResourceDoesNotExistException}
     * without creating the local file.
     *
     * NOTE(review): method name misspells "NonExistent"; kept as-is since the name is the
     * test's public identifier.
     */
    public void testSnapshotNonExistant()
        throws ProxyException, IOException
    {
        String path = "org/apache/maven/test/does-not-exist/1.0-SNAPSHOT/does-not-exist-1.0-SNAPSHOT.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );

        assertFalse( expectedFile.exists() );

        try
        {
            File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
            fail( "File returned was: " + file + "; should have got a not found exception" );
        }
        catch ( ResourceDoesNotExistException e )
        {
            // expected, but check file was not created
            assertFalse( expectedFile.exists() );
        }
    }
+
    /**
     * A timestamp-driven snapshot absent from the managed repository is downloaded from the
     * first proxied repository and its content stored locally.
     */
    public void testTimestampDrivenSnapshotNotPresentAlready()
        throws ResourceDoesNotExistException, ProxyException, IOException
    {
        String path =
            "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );

        // ensure the snapshot is not present locally so it must be proxied
        expectedFile.delete();
        assertFalse( expectedFile.exists() );

        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( proxiedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
    }
+
    /**
     * When the locally present snapshot is aged to the past, a newer copy in the first
     * proxied repository must replace it.
     */
    public void testNewerTimestampDrivenSnapshotOnFirstRepo()
        throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
    {
        String path =
            "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );

        assertTrue( expectedFile.exists() );

        // make the local copy look out of date relative to the proxy
        expectedFile.setLastModified( getPastDate().getTime() );

        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( proxiedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
    }
+
    /**
     * When the local snapshot is newer (future-dated) than the proxied copy, the managed copy
     * must be kept even with an "always" snapshot update policy.
     */
    public void testOlderTimestampDrivenSnapshotOnFirstRepo()
        throws ResourceDoesNotExistException, ProxyException, IOException
    {
        String path =
            "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( expectedFile );

        assertTrue( expectedFile.exists() );

        // make the local copy strictly newer than anything in the proxies
        expectedFile.setLastModified( getFutureDate().getTime() );

        proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );

        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        String unexpectedContents = FileUtils.fileRead( proxiedFile );
        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
    }
+
+/* TODO: won't pass until Wagon preserves timestamp on download
+ public void testNewerTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
+ throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
+ {
+ String path =
+ "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
+ File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
+
+ assertFalse( expectedFile.exists() );
+
+ File repoLocation = getTestFile( "target/test-repository/proxied1" );
+ FileUtils.deleteDirectory( repoLocation );
+ copyDirectoryStructure( getTestFile( "src/test/repositories/proxied1" ), repoLocation );
+ proxiedRepository1 = createRepository( "proxied1", repoLocation );
+
+ new File( proxiedRepository1.getBasedir(), path ).setLastModified( getPastDate().getTime() );
+
+ proxiedRepositories.clear();
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
+ proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
+
+ File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
+
+ assertEquals( "Check file matches", expectedFile, file );
+ assertTrue( "Check file created", file.exists() );
+
+ File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
+ String expectedContents = FileUtils.fileRead( proxiedFile );
+ assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
+
+ proxiedFile = new File( proxiedRepository1.getBasedir(), path );
+ String unexpectedContents = FileUtils.fileRead( proxiedFile );
+ assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
+ }
+*/
+
    /**
     * With the snapshot absent locally and the second proxied repository's copy aged into the
     * past, the first (newer) proxied repository's copy must be the one downloaded.
     * Rebuilds proxied2 from the source fixture so its timestamp can be altered safely.
     */
    public void testOlderTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
        throws ParseException, ResourceDoesNotExistException, ProxyException, IOException
    {
        String path =
            "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );

        expectedFile.delete();
        assertFalse( expectedFile.exists() );

        // rebuild proxied2 from the pristine fixture so mutating its timestamps is safe
        File repoLocation = getTestFile( "target/test-repository/proxied2" );
        FileUtils.deleteDirectory( repoLocation );
        copyDirectoryStructure( getTestFile( "src/test/repositories/proxied2" ), repoLocation );
        proxiedRepository2 = createRepository( "proxied2", repoLocation );

        // age proxied2's copy so proxied1's copy is the newer candidate
        new File( proxiedRepository2.getBasedir(), path ).setLastModified( getPastDate().getTime() );

        proxiedRepositories.clear();
        proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );

        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );

        // content must come from proxied1 ...
        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( proxiedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );

        // ... and must not match proxied2's older copy
        proxiedFile = new File( proxiedRepository2.getBasedir(), path );
        String unexpectedContents = FileUtils.fileRead( proxiedFile );
        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
    }
+
    /**
     * The proxied snapshot is future-dated but the default update policy does not trigger a
     * refresh: the managed copy must be served and must not match the proxy's content.
     */
    public void testTimestampDrivenSnapshotNotExpired()
        throws IOException, ResourceDoesNotExistException, ProxyException
    {
        String path =
            "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );

        assertTrue( expectedFile.exists() );

        // make the proxied copy look newer than the managed one
        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        proxiedFile.setLastModified( getFutureDate().getTime() );

        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        String expectedContents = FileUtils.fileRead( expectedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );

        String unexpectedContents = FileUtils.fileRead( proxiedFile );
        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
    }
+
    /**
     * When the managed snapshot's timestamp equals the proxied copy's, an "always" snapshot
     * update policy must still leave the managed copy unchanged (nothing newer to fetch).
     */
    public void testTimestampDrivenSnapshotNotUpdated()
        throws IOException, ResourceDoesNotExistException, ProxyException
    {
        String path =
            "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( expectedFile );

        assertTrue( expectedFile.exists() );

        // align local and proxied timestamps so no update is detected
        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        expectedFile.setLastModified( proxiedFile.lastModified() );

        proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );

        String unexpectedContents = FileUtils.fileRead( proxiedFile );
        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
    }
+
    /**
     * A previously cached failure for the path, recorded with an always-update policy, must be
     * retried: the download succeeds from the first proxy and the cached failure is cleared.
     */
    public void testTimestampDrivenSnapshotNotPresentAlreadyExpiredCacheFailure()
        throws ResourceDoesNotExistException, ProxyException, IOException
    {
        String path =
            "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );

        expectedFile.delete();
        assertFalse( expectedFile.exists() );

        // seed proxy 1 with a cached failure for this path before the request
        proxiedRepositories.clear();
        ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
        proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
        proxiedRepositories.add( proxiedArtifactRepository );
        proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );

        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( proxiedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );

        // successful retry must clear the cached failure
        assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
    }
+
    /**
     * A metadata-driven snapshot (full timestamp/build-number filename) absent locally is
     * downloaded from the first proxied repository.
     */
    public void testMetadataDrivenSnapshotNotPresentAlready()
        throws ResourceDoesNotExistException, ProxyException, IOException
    {
        String path =
            "org/apache/maven/test/get-metadata-snapshot/1.0-SNAPSHOT/get-metadata-snapshot-1.0-20050831.101112-1.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );

        expectedFile.delete();
        assertFalse( expectedFile.exists() );

        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( proxiedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
    }
+
    /**
     * A metadata-driven snapshot already present locally is treated like a release: even with
     * an aged local copy, the managed content must be served rather than the proxy's version.
     */
    public void testGetMetadataDrivenSnapshotRemoteUpdate()
        throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
    {
        // Metadata driven snapshots (using a full timestamp) are treated like a release. It is the timing of the
        // updates to the metadata files that triggers which will be downloaded

        String path =
            "org/apache/maven/test/get-present-metadata-snapshot/1.0-SNAPSHOT/get-present-metadata-snapshot-1.0-20050831.101112-1.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( expectedFile );

        assertTrue( expectedFile.exists() );

        // aging the artifact itself must not trigger a re-download
        expectedFile.setLastModified( getPastDate().getTime() );

        File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
        File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
        String unexpectedContents = FileUtils.fileRead( proxiedFile );
        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
    }
+
    /**
     * A legacy-layout path requested against a legacy managed repository is resolved from a
     * default-layout proxied repository (path translated between layouts) and stored locally.
     */
    public void testLegacyManagedRepoGetNotPresent()
        throws IOException, ResourceDoesNotExistException, ProxyException
    {
        String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
        File expectedFile = new File( legacyManagedRepository.getBasedir(), path );

        assertFalse( expectedFile.exists() );

        File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        // proxy side uses the default (Maven 2) layout for the same artifact
        File proxiedFile = new File( proxiedRepository1.getBasedir(),
                                     "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar" );
        String expectedContents = FileUtils.fileRead( proxiedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
        // TODO: timestamp preservation requires support for that in wagon
//        assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
    }
+
    /**
     * An artifact already present in the legacy managed repository must be served untouched:
     * same content and same modification time, never the proxy's copy.
     */
    public void testLegacyManagedRepoGetAlreadyPresent()
        throws IOException, ResourceDoesNotExistException, ProxyException
    {
        String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
        File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( expectedFile );
        long originalModificationTime = expectedFile.lastModified();

        assertTrue( expectedFile.exists() );

        File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
        File proxiedFile = new File( proxiedRepository1.getBasedir(),
                                     "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar" );
        String unexpectedContents = FileUtils.fileRead( proxiedFile );
        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
        assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
        assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
                      file.lastModified() );
    }
+
    /**
     * A default-layout path requested against a default managed repository is resolved from a
     * legacy-layout proxied repository (path translated between layouts) and stored locally.
     */
    public void testLegacyProxyRepoGetNotPresent()
        throws IOException, ResourceDoesNotExistException, ProxyException
    {
        String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );

        expectedFile.delete();
        assertFalse( expectedFile.exists() );

        File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        // proxy side uses the legacy (Maven 1) layout for the same artifact
        File proxiedFile =
            new File( legacyProxiedRepository.getBasedir(), "org.apache.maven.test/jars/get-default-layout-1.0.jar" );
        String expectedContents = FileUtils.fileRead( proxiedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
        // TODO: timestamp preservation requires support for that in wagon
//        assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
    }
+
    /**
     * An artifact already present in the default managed repository must be served untouched
     * even when the configured proxies use the legacy layout.
     */
    public void testLegacyProxyRepoGetAlreadyPresent()
        throws IOException, ResourceDoesNotExistException, ProxyException
    {
        String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
        File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( expectedFile );
        long originalModificationTime = expectedFile.lastModified();

        assertTrue( expectedFile.exists() );

        File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
        File proxiedFile = new File( legacyProxiedRepository.getBasedir(),
                                     "org.apache.maven.test/jars/get-default-layout-present-1.0.jar" );
        String unexpectedContents = FileUtils.fileRead( proxiedFile );
        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
        assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
        assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
                      file.lastModified() );
    }
+
    /**
     * With both the managed and proxied repositories in legacy layout, the same path works on
     * both sides: the artifact is downloaded and stored without layout translation.
     */
    public void testLegacyManagedAndProxyRepoGetNotPresent()
        throws IOException, ResourceDoesNotExistException, ProxyException
    {
        String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
        File expectedFile = new File( legacyManagedRepository.getBasedir(), path );

        assertFalse( expectedFile.exists() );

        File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( proxiedFile );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
        // TODO: timestamp preservation requires support for that in wagon
//        assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
    }
+
    /**
     * With both repositories in legacy layout and the artifact already managed locally, the
     * managed copy must be served untouched: same content, same modification time.
     */
    public void testLegacyManagedAndProxyRepoGetAlreadyPresent()
        throws IOException, ResourceDoesNotExistException, ProxyException
    {
        String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
        File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
        String expectedContents = FileUtils.fileRead( expectedFile );
        long originalModificationTime = expectedFile.lastModified();

        assertTrue( expectedFile.exists() );

        File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );

        assertEquals( "Check file matches", expectedFile, file );
        assertTrue( "Check file created", file.exists() );
        assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
        File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
        String unexpectedContents = FileUtils.fileRead( proxiedFile );
        assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
        assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
        assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
                      file.lastModified() );
    }
+
+ private static Versioning getVersioning( List versions )
+ {
+ Versioning versioning = new Versioning();
+ for ( Iterator i = versions.iterator(); i.hasNext(); )
+ {
+ String v = (String) i.next();
+ versioning.addVersion( v );
+ }
+ return versioning;
+ }
+
    /**
     * Convenience overload: serialize expected metadata with no version element.
     *
     * @param artifactId the artifact identifier
     * @param versioning the versioning element
     * @return the serialized maven-metadata.xml content
     * @throws IOException if the metadata cannot be written
     */
    private static String getExpectedMetadata( String artifactId, Versioning versioning )
        throws IOException
    {
        return getExpectedMetadata( artifactId, null, versioning );
    }
+
+ private static String getExpectedMetadata( String artifactId, String version, Versioning versioning )
+ throws IOException
+ {
+ StringWriter expectedContents = new StringWriter();
+
+ Metadata m = new Metadata();
+ m.setGroupId( "org.apache.maven.test" );
+ m.setArtifactId( artifactId );
+ m.setVersion( version );
+ m.setVersioning( versioning );
+ m.setModelEncoding( null );
+
+ new MetadataXpp3Writer().write( expectedContents, m );
+ return expectedContents.toString();
+ }
+
    /**
     * Convenience overload: serialize expected metadata with no versioning element.
     *
     * @param artifactId the artifact identifier
     * @param version    the version element
     * @return the serialized maven-metadata.xml content
     * @throws IOException if the metadata cannot be written
     */
    private static String getExpectedMetadata( String artifactId, String version )
        throws IOException
    {
        return getExpectedMetadata( artifactId, version, null );
    }
+
+ private static Versioning getVersioning( String timestamp, int buildNumber )
+ {
+ Versioning versioning = new Versioning();
+ versioning.setSnapshot( new Snapshot() );
+ versioning.getSnapshot().setTimestamp( timestamp );
+ versioning.getSnapshot().setBuildNumber( buildNumber );
+ return versioning;
+ }
+
+ private static Date getPastDate()
+ throws ParseException
+ {
+ return new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" );
+ }
+
+ private static Date getFutureDate()
+ {
+ Calendar cal = Calendar.getInstance();
+ cal.add( Calendar.YEAR, 1 );
+ return cal.getTime();
+ }
+
    /**
     * Program the wagon mock so that every checksum (.sha1 and .md5) download for the given
     * path fails with a {@link TransferFailedException}. Expectations are recorded in the
     * exact order the handler issues them.
     *
     * @param path         the repository path whose checksums should fail
     * @param expectedFile the local artifact file the checksum .tmp files sit beside
     */
    private void mockFailedChecksums( String path, File expectedFile )
        throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
    {
        // must do it twice as it will re-attempt it
        wagonMock.get( path + ".sha1", new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" ) );
        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );

        wagonMock.get( path + ".md5", new File( expectedFile.getParentFile(), expectedFile.getName() + ".md5.tmp" ) );
        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );

        wagonMock.get( path + ".sha1", new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" ) );
        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );

        wagonMock.get( path + ".md5", new File( expectedFile.getParentFile(), expectedFile.getName() + ".md5.tmp" ) );
        wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
    }
+
+ /**
+  * Locate the checksum file that sits next to the given file.
+  *
+  * @param file      the artifact file the checksum belongs to
+  * @param algorithm the checksum extension, e.g. "sha1" or "md5"
+  * @return the sibling file named {@code <file>.<algorithm>}
+  */
+ private File getChecksumFile( File file, String algorithm )
+ {
+     String checksumName = file.getName() + "." + algorithm;
+     return new File( file.getParentFile(), checksumName );
+ }
+
+ /**
+  * A faster recursive copy that omits .svn directories.
+  *
+  * @param sourceDirectory the source directory to copy
+  * @param destDirectory   the target location
+  * @throws java.io.IOException if there is a copying problem
+  * @todo get back into plexus-utils, share with converter module
+  */
+ private static void copyDirectoryStructure( File sourceDirectory, File destDirectory )
+     throws IOException
+ {
+     if ( !sourceDirectory.exists() )
+     {
+         throw new IOException( "Source directory doesn't exist (" + sourceDirectory.getAbsolutePath() + ")." );
+     }
+
+     File[] files = sourceDirectory.listFiles();
+     if ( files == null )
+     {
+         // listFiles() returns null on an I/O error or when the path is not a directory;
+         // without this check the loop below would throw a NullPointerException
+         throw new IOException( "Could not list contents of " + sourceDirectory.getAbsolutePath() );
+     }
+
+     String sourcePath = sourceDirectory.getAbsolutePath();
+
+     for ( int i = 0; i < files.length; i++ )
+     {
+         File file = files[i];
+
+         // make the path relative to the source root so it can be resolved against destDirectory
+         String dest = file.getAbsolutePath();
+         dest = dest.substring( sourcePath.length() + 1 );
+
+         File destination = new File( destDirectory, dest );
+
+         if ( file.isFile() )
+         {
+             destination = destination.getParentFile();
+
+             FileUtils.copyFileToDirectory( file, destination );
+         }
+         else if ( file.isDirectory() )
+         {
+             // skip SCM metadata directories entirely
+             if ( !".svn".equals( file.getName() ) )
+             {
+                 if ( !destination.exists() && !destination.mkdirs() )
+                 {
+                     throw new IOException(
+                         "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
+                 }
+
+                 copyDirectoryStructure( file, destination );
+             }
+         }
+         else
+         {
+             throw new IOException( "Unknown file type: " + file.getAbsolutePath() );
+         }
+     }
+ }
+
+ /**
+  * Wrap a repository in a ProxiedArtifactRepository, named after the repository's
+  * id and with failure caching enabled (the default behaviour under test).
+  *
+  * @param repository the remote repository to proxy
+  * @return the wrapped, failure-caching proxied repository
+  */
+ private static ProxiedArtifactRepository createProxiedRepository( ArtifactRepository repository )
+ {
+     ProxiedArtifactRepository proxied = new ProxiedArtifactRepository( repository );
+     proxied.setName( repository.getId() );
+     proxied.setCacheFailures( true );
+     return proxied;
+ }
+
+ /**
+  * Wrap a repository as a proxied repository that hard-fails: transfer errors
+  * propagate instead of falling through to the next proxied repository.
+  *
+  * @param repository the remote repository to proxy
+  * @return the wrapped, hard-failing proxied repository
+  */
+ private static ProxiedArtifactRepository createHardFailProxiedRepository( ArtifactRepository repository )
+ {
+     ProxiedArtifactRepository proxied = createProxiedRepository( repository );
+     proxied.setHardFail( true );
+     return proxied;
+ }
+
+ /**
+  * Create a default-layout repository rooted at the given local directory.
+  *
+  * @param id           the repository id
+  * @param repoLocation the local directory backing the repository
+  * @return the created repository
+  * @throws MalformedURLException if the location cannot be expressed as a URL
+  */
+ private ArtifactRepository createRepository( String id, File repoLocation )
+     throws MalformedURLException
+ {
+     String url = repoLocation.toURI().toURL().toExternalForm();
+     return createRepository( id, url );
+ }
+
+ /**
+  * Create a repository rooted at the given local directory using an explicit layout.
+  *
+  * @param id       the repository id
+  * @param location the local directory backing the repository
+  * @param layout   the repository layout (e.g. default or legacy)
+  * @return the created repository
+  * @throws MalformedURLException if the location cannot be expressed as a URL
+  */
+ private ArtifactRepository createRepository( String id, File location, ArtifactRepositoryLayout layout )
+     throws MalformedURLException
+ {
+     String url = location.toURI().toURL().toExternalForm();
+     return createRepository( id, url, layout );
+ }
+
+ /**
+  * Create a repository at the given URL using the test's default layout.
+  *
+  * @param id  the repository id
+  * @param url the repository root URL
+  * @return the created repository
+  */
+ private ArtifactRepository createRepository( String id, String url )
+ {
+ return createRepository( id, url, defaultLayout );
+ }
+
+ /**
+  * Create an artifact repository via the factory with the given layout and with
+  * no snapshot/release policies configured (the two trailing nulls).
+  *
+  * @param id               the repository id
+  * @param url              the repository root URL
+  * @param repositoryLayout the layout to apply
+  * @return the created repository
+  */
+ private ArtifactRepository createRepository( String id, String url, ArtifactRepositoryLayout repositoryLayout )
+ {
+     ArtifactRepository repository = factory.createArtifactRepository( id, url, repositoryLayout, null, null );
+     return repository;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.wagon.ConnectionException;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authentication.AuthenticationException;
+import org.apache.maven.wagon.authentication.AuthenticationInfo;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.apache.maven.wagon.events.SessionListener;
+import org.apache.maven.wagon.events.TransferListener;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.apache.maven.wagon.repository.Repository;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * A test Wagon that forwards every call to a delegate (normally an EasyMock mock,
+ * wired in via {@link #setDelegate}) and then simulates the file transfer the mock
+ * cannot perform: after a get, a file is fabricated at the destination, either
+ * empty or containing the content configured with {@link #setContentToGet}.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class WagonDelegate
+ implements Wagon
+{
+ // the wagon that actually receives every call; set via setDelegate()
+ private Wagon delegate;
+
+ // optional content written to fetched files; null means create an empty file
+ private String contentToGet;
+
+ public void get( String resourceName, File destination )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ delegate.get( resourceName, destination );
+ // the mock does not write anything, so fabricate the downloaded file
+ create( destination );
+ }
+
+ public boolean getIfNewer( String resourceName, File destination, long timestamp )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ boolean result = delegate.getIfNewer( resourceName, destination, timestamp );
+ // only fabricate the file when nothing exists yet, preserving prior content
+ createIfMissing( destination );
+ return result;
+ }
+
+ // --- the remaining Wagon operations below are pure pass-throughs to the delegate ---
+
+ public void put( File source, String destination )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ delegate.put( source, destination );
+ }
+
+ public void putDirectory( File sourceDirectory, String destinationDirectory )
+ throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
+ {
+ delegate.putDirectory( sourceDirectory, destinationDirectory );
+ }
+
+ public boolean supportsDirectoryCopy()
+ {
+ return delegate.supportsDirectoryCopy();
+ }
+
+ public Repository getRepository()
+ {
+ return delegate.getRepository();
+ }
+
+ public void connect( Repository source )
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.connect( source );
+ }
+
+ public void connect( Repository source, ProxyInfo proxyInfo )
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.connect( source, proxyInfo );
+ }
+
+ public void connect( Repository source, AuthenticationInfo authenticationInfo )
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.connect( source, authenticationInfo );
+ }
+
+ public void connect( Repository source, AuthenticationInfo authenticationInfo, ProxyInfo proxyInfo )
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.connect( source, authenticationInfo, proxyInfo );
+ }
+
+ public void openConnection()
+ throws ConnectionException, AuthenticationException
+ {
+ delegate.openConnection();
+ }
+
+ public void disconnect()
+ throws ConnectionException
+ {
+ delegate.disconnect();
+ }
+
+ public void addSessionListener( SessionListener listener )
+ {
+ delegate.addSessionListener( listener );
+ }
+
+ public void removeSessionListener( SessionListener listener )
+ {
+ delegate.removeSessionListener( listener );
+ }
+
+ public boolean hasSessionListener( SessionListener listener )
+ {
+ return delegate.hasSessionListener( listener );
+ }
+
+ public void addTransferListener( TransferListener listener )
+ {
+ delegate.addTransferListener( listener );
+ }
+
+ public void removeTransferListener( TransferListener listener )
+ {
+ delegate.removeTransferListener( listener );
+ }
+
+ public boolean hasTransferListener( TransferListener listener )
+ {
+ return delegate.hasTransferListener( listener );
+ }
+
+ public boolean isInteractive()
+ {
+ return delegate.isInteractive();
+ }
+
+ public void setInteractive( boolean interactive )
+ {
+ delegate.setInteractive( interactive );
+ }
+
+ /**
+  * Install the wagon (typically a mock) that receives all forwarded calls.
+  * Must be called before any other method is used.
+  */
+ public void setDelegate( Wagon delegate )
+ {
+ this.delegate = delegate;
+ }
+
+ /**
+  * Configure the content written into files fabricated by {@link #get}.
+  * Package-private: intended for tests in this package only.
+  */
+ void setContentToGet( String content )
+ {
+ contentToGet = content;
+ }
+
+ private void createIfMissing( File destination )
+ {
+ // since the mock won't actually copy a file, create an empty one to simulate file existence
+ if ( !destination.exists() )
+ {
+ create( destination );
+ }
+ }
+
+ // fabricate the "downloaded" file: empty by default, or containing contentToGet
+ private void create( File destination )
+ {
+ try
+ {
+ destination.getParentFile().mkdirs();
+ if ( contentToGet == null )
+ {
+ destination.createNewFile();
+ }
+ else
+ {
+ FileUtils.fileWrite( destination.getAbsolutePath(), contentToGet );
+ }
+ }
+ catch ( IOException e )
+ {
+ // surface I/O problems as unchecked failures so the test aborts loudly
+ throw new RuntimeException( e.getMessage(), e );
+ }
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.util.FileUtils;
-import org.easymock.MockControl;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.StringWriter;
-import java.net.MalformedURLException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * Test the proxy handler.
- *
- * @author Brett Porter
- */
-public class ProxyRequestHandlerTest
- extends PlexusTestCase
-{
- private ProxyRequestHandler requestHandler;
-
- private List proxiedRepositories;
-
- private List legacyProxiedRepositories;
-
- private ArtifactRepository defaultManagedRepository;
-
- private ArtifactRepository legacyManagedRepository;
-
- private ArtifactRepository proxiedRepository1;
-
- private ArtifactRepository proxiedRepository2;
-
- private ArtifactRepository legacyProxiedRepository;
-
- private ArtifactRepositoryLayout defaultLayout;
-
- private ArtifactRepositoryFactory factory;
-
- private MockControl wagonMockControl;
-
- private Wagon wagonMock;
-
- private static final ArtifactRepositoryPolicy DEFAULT_POLICY =
- new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER, null );
-
- private static final ArtifactRepositoryPolicy ALWAYS_UPDATE_POLICY =
- new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS, null );
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- requestHandler = (ProxyRequestHandler) lookup( ProxyRequestHandler.ROLE );
-
- factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- File repoLocation = getTestFile( "target/test-repository/managed" );
- // faster only to delete this one before copying, the others are done case by case
- FileUtils.deleteDirectory( new File( repoLocation, "org/apache/maven/test/get-merged-metadata" ) );
- copyDirectoryStructure( getTestFile( "src/test/repositories/managed" ), repoLocation );
-
- defaultLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- defaultManagedRepository = createRepository( "managed-repository", repoLocation );
-
- repoLocation = getTestFile( "target/test-repository/legacy-managed" );
- FileUtils.deleteDirectory( repoLocation );
- copyDirectoryStructure( getTestFile( "src/test/repositories/legacy-managed" ), repoLocation );
-
- ArtifactRepositoryLayout legacyLayout =
- (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
-
- legacyManagedRepository = createRepository( "managed-repository", repoLocation );
-
- File location = getTestFile( "src/test/repositories/proxied1" );
- proxiedRepository1 = createRepository( "proxied1", location );
-
- location = getTestFile( "src/test/repositories/proxied2" );
- proxiedRepository2 = createRepository( "proxied2", location );
-
- proxiedRepositories = new ArrayList( 2 );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- location = getTestFile( "src/test/repositories/legacy-proxied" );
- legacyProxiedRepository = createRepository( "legacy-proxied", location, legacyLayout );
-
- legacyProxiedRepositories = Collections.singletonList( createProxiedRepository( legacyProxiedRepository ) );
-
- wagonMockControl = MockControl.createNiceControl( Wagon.class );
- wagonMock = (Wagon) wagonMockControl.getMock();
- WagonDelegate delegate = (WagonDelegate) lookup( Wagon.ROLE, "test" );
- delegate.setDelegate( wagonMock );
- }
-
- public void testGetDefaultLayoutNotPresent()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- // TODO: timestamp preservation requires support for that in wagon
-// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
- }
-
- public void testGetDefaultLayoutAlreadyPresent()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
- long originalModificationTime = expectedFile.lastModified();
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
- assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
- file.lastModified() );
- }
-
- public void testGetDefaultLayoutRemoteUpdate()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- expectedFile.setLastModified( getPastDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetWhenInBothProxiedRepos()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetInSecondProxiedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- }
-
- public void testNotFoundInAnyProxies()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/does-not-exist/1.0/does-not-exist-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "File returned was: " + file + "; should have got a not found exception" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected, but check file was not created
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testGetInSecondProxiedRepoFirstFails()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMockControl.replay();
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- wagonMockControl.verify();
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
-
- public void testGetButAllRepositoriesFail()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepository2 = createRepository( "proxied2", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository1 );
- ProxiedArtifactRepository proxiedArtifactRepository2 = createProxiedRepository( proxiedRepository2 );
- proxiedRepositories.add( proxiedArtifactRepository2 );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // as expected
- wagonMockControl.verify();
- assertTrue( "Check failure", proxiedArtifactRepository1.isCachedFailure( path ) );
- assertTrue( "Check failure", proxiedArtifactRepository2.isCachedFailure( path ) );
-
- // TODO: do we really want failures to present as a not found?
- // TODO: How much information on each failure should we pass back to the user vs. logging in the proxy?
- }
- }
-
- public void testGetInSecondProxiedRepoFirstHardFails()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createHardFailProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- TransferFailedException failedException = new TransferFailedException( "transfer failed" );
- wagonMockControl.setThrowable( failedException );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ProxyException e )
- {
- // expect a failure
- wagonMockControl.verify();
-
- assertEquals( "Check cause", failedException, e.getCause() );
- assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
- }
-
- public void testGetInSecondProxiedRepoFirstFailsFromCache()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- // fail from the cache, even though it is in the first repo now
-
- String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetInSecondProxiedRepoFirstHardFailsFromCache()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- // fail from the cache, even though it is in the first repo now
-
- String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createHardFailProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ProxyException e )
- {
- // expect a failure
- assertTrue( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
- }
-
- public void testGetInSecondProxiedRepoFirstFailsDisabledCacheFailure()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
-
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
- proxiedArtifactRepository.setCacheFailures( false );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMockControl.replay();
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- wagonMockControl.verify();
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
-
- public void testGetWhenInBothProxiedReposFirstHasExpiredCacheFailure()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
-
- assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
-
- public void testGetAlwaysAlreadyPresent()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetAlwaysAlreadyPresentRemovedFromProxies()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-removed-from-proxies/1.0/get-removed-from-proxies-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- // TODO: is this the correct behaviour, or should it be considered removed too?
- }
-
- public void testGetAlwaysWithCachedFailure()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetRemovesTemporaryFileOnSuccess()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File tempFile = new File( file.getParentFile(), file.getName() + ".tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
- }
-
- public void testGetRemovesTemporaryFileOnError()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository1 );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // as expected
- wagonMockControl.verify();
-
- File tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
- }
- }
-
- public void testGetRemovesTemporaryChecksumFileOnSuccess()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File tempFile = new File( file.getParentFile(), file.getName() + ".sha1.tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
- }
-
- public void testGetRemovesTemporaryChecksumFileOnError()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository1 );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
-
- mockFailedChecksums( path, expectedFile );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // as expected
- wagonMockControl.verify();
-
- File tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
-
- tempFile = new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" );
- assertFalse( "Check temporary file removed", tempFile.exists() );
- }
- }
-
- public void testGetChecksumBothCorrect()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-both-right/1.0/get-checksum-both-right-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "sha1" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar",
- FileUtils.fileRead( checksumFile ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
- }
-
- public void testGetCorrectSha1NoMd5()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "sha1" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
- FileUtils.fileRead( checksumFile ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
- }
-
- public void testGetCorrectSha1BadMd5()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-bad-md5/1.0/get-checksum-sha1-bad-md5-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "sha1" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
- FileUtils.fileRead( checksumFile ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
- }
-
- public void testGetCorrectMd5NoSha1()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-md5-only/1.0/get-checksum-md5-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "md5" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar",
- FileUtils.fileRead( checksumFile ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
- }
-
- public void testGetCorrectMd5BadSha1()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-md5-bad-sha1/1.0/get-checksum-md5-bad-sha1-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File checksumFile = getChecksumFile( file, "md5" );
- assertTrue( "Check file created", checksumFile.exists() );
- assertEquals( "Check checksum", "8a02aa67549d27b2a03cd4547439c6d3 *get-checksum-md5-bad-sha1-1.0.jar",
- FileUtils.fileRead( checksumFile ).trim() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
- }
-
- public void testGetWithNoChecksums()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- assertFalse( "Check file not created", getChecksumFile( file, "md5" ).exists() );
- assertFalse( "Check file not created", getChecksumFile( file, "sha1" ).exists() );
- }
-
- public void testGetBadMd5BadSha1()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-both-bad/1.0/get-checksum-both-bad-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expect a failure
- assertFalse( "Check file not created", expectedFile.exists() );
-
- assertFalse( "Check file not created", getChecksumFile( expectedFile, "md5" ).exists() );
- assertFalse( "Check file not created", getChecksumFile( expectedFile, "sha1" ).exists() );
- }
- }
-
- public void testGetChecksumTransferFailed()
- throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
- AuthorizationException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- proxiedRepository1 = createRepository( "proxied1", "test://..." );
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository1 = createProxiedRepository( proxiedRepository1 );
- proxiedRepositories.add( proxiedArtifactRepository1 );
-
- wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
-
- mockFailedChecksums( path, expectedFile );
-
- wagonMockControl.replay();
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // as expected
- wagonMockControl.verify();
-
- assertFalse( "Check file not created", expectedFile.exists() );
-
- assertFalse( "Check file not created", getChecksumFile( expectedFile, "md5" ).exists() );
- assertFalse( "Check file not created", getChecksumFile( expectedFile, "sha1" ).exists() );
- }
- }
-
- public void testGetAlwaysBadChecksumPresentLocallyAbsentRemote()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-bad-local-checksum/1.0/get-bad-local-checksum-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
-
- assertFalse( "Check checksum removed", new File( file.getParentFile(), file.getName() + ".sha1" ).exists() );
- assertFalse( "Check checksum removed", new File( file.getParentFile(), file.getName() + ".md5" ).exists() );
- }
-
- public void testGetChecksumPresentInManagedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-checksum-from-managed-repo/1.0/get-checksum-from-managed-repo-1.0.jar.sha1";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetAlwaysChecksumPresentInManagedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-checksum-from-managed-repo/1.0/get-checksum-from-managed-repo-1.0.jar.sha1";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetChecksumNotPresentInManagedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar.sha1";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected
-
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testGetAlwaysChecksumNotPresentInManagedRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar.sha1";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected
-
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testGetMetadataNotPresent()
- throws ProxyException, IOException
- {
- String path = "org/apache/maven/test/dummy-artifact/1.0/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "Found file: " + file + "; but was expecting a failure" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected
-
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testGetMetadataProxied()
- throws ProxyException, ResourceDoesNotExistException, IOException
- {
- String path = "org/apache/maven/test/get-default-metadata/1.0/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- FileUtils.deleteDirectory( expectedFile.getParentFile() );
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- String expectedContents = getExpectedMetadata( "get-default-metadata", "1.0" );
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
- }
-
- public void testGetMetadataMergeRepos()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-merged-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents = getExpectedMetadata( "get-merged-metadata", getVersioning(
- Arrays.asList( new String[]{"0.9", "1.0", "2.0", "3.0", "5.0", "4.0"} ) ) );
-
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
- }
-
- public void testGetMetadataRemovedFromProxies()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-removed-metadata/1.0/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
- }
-
- public void testGetReleaseMetadataNotExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
-
- String unexpectedContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), path ) );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetSnapshotMetadataNotExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
-
- String unexpectedContents = FileUtils.fileRead( new File( proxiedRepository1.getBasedir(), path ) );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetReleaseMetadataExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents =
- getExpectedMetadata( "get-updated-metadata", getVersioning( Arrays.asList( new String[]{"1.0", "2.0"} ) ) );
-
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetSnapshotMetadataExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents =
- getExpectedMetadata( "get-updated-metadata", "1.0-SNAPSHOT", getVersioning( "20050831.111213", 2 ) );
-
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetMetadataNotUpdated()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
-
- assertTrue( expectedFile.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( proxiedFile.lastModified() );
-
- proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
-
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check content doesn't match proxy version",
- unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetMetadataUpdated()
- throws IOException, ResourceDoesNotExistException, ProxyException, ParseException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
-
- assertTrue( expectedFile.exists() );
-
- new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents =
- getExpectedMetadata( "get-updated-metadata", getVersioning( Arrays.asList( new String[]{"1.0", "2.0"} ) ) );
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check content doesn't match old version",
- unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testGetAlwaysMetadata()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( new File( defaultManagedRepository.getBasedir(), path ) );
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.getAlways( path, proxiedRepositories, defaultManagedRepository );
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- String expectedContents =
- getExpectedMetadata( "get-updated-metadata", getVersioning( Arrays.asList( new String[]{"1.0", "2.0"} ) ) );
-
- assertEquals( "Check content matches", expectedContents, FileUtils.fileRead( file ) );
- assertFalse( "Check content doesn't match old version",
- unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testSnapshotNonExistant()
- throws ProxyException, IOException
- {
- String path = "org/apache/maven/test/does-not-exist/1.0-SNAPSHOT/does-not-exist-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- try
- {
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
- fail( "File returned was: " + file + "; should have got a not found exception" );
- }
- catch ( ResourceDoesNotExistException e )
- {
- // expected, but check file was not created
- assertFalse( expectedFile.exists() );
- }
- }
-
- public void testTimestampDrivenSnapshotNotPresentAlready()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- }
-
- public void testNewerTimestampDrivenSnapshotOnFirstRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- String path =
- "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertTrue( expectedFile.exists() );
-
- expectedFile.setLastModified( getPastDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- }
-
- public void testOlderTimestampDrivenSnapshotOnFirstRepo()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- expectedFile.setLastModified( getFutureDate().getTime() );
-
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
-/* TODO: won't pass until Wagon preserves timestamp on download
- public void testNewerTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- String path =
- "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- File repoLocation = getTestFile( "target/test-repository/proxied1" );
- FileUtils.deleteDirectory( repoLocation );
- copyDirectoryStructure( getTestFile( "src/test/repositories/proxied1" ), repoLocation );
- proxiedRepository1 = createRepository( "proxied1", repoLocation );
-
- new File( proxiedRepository1.getBasedir(), path ).setLastModified( getPastDate().getTime() );
-
- proxiedRepositories.clear();
- proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-*/
-
- public void testOlderTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
- throws ParseException, ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File repoLocation = getTestFile( "target/test-repository/proxied2" );
- FileUtils.deleteDirectory( repoLocation );
- copyDirectoryStructure( getTestFile( "src/test/repositories/proxied2" ), repoLocation );
- proxiedRepository2 = createRepository( "proxied2", repoLocation );
-
- new File( proxiedRepository2.getBasedir(), path ).setLastModified( getPastDate().getTime() );
-
- proxiedRepositories.clear();
- proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- proxiedFile = new File( proxiedRepository2.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testTimestampDrivenSnapshotNotExpired()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path =
- "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- assertTrue( expectedFile.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- proxiedFile.setLastModified( getFutureDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- String expectedContents = FileUtils.fileRead( expectedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testTimestampDrivenSnapshotNotUpdated()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path =
- "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- expectedFile.setLastModified( proxiedFile.lastModified() );
-
- proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testTimestampDrivenSnapshotNotPresentAlreadyExpiredCacheFailure()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- proxiedRepositories.clear();
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
- proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
- proxiedRepositories.add( proxiedArtifactRepository );
- proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
-
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
-
- assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
- }
-
- public void testMetadataDrivenSnapshotNotPresentAlready()
- throws ResourceDoesNotExistException, ProxyException, IOException
- {
- String path =
- "org/apache/maven/test/get-metadata-snapshot/1.0-SNAPSHOT/get-metadata-snapshot-1.0-20050831.101112-1.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- }
-
- public void testGetMetadataDrivenSnapshotRemoteUpdate()
- throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
- {
- // Metadata driven snapshots (using a full timestamp) are treated like a release. It is the timing of the
- // updates to the metadata files that triggers which will be downloaded
-
- String path =
- "org/apache/maven/test/get-present-metadata-snapshot/1.0-SNAPSHOT/get-present-metadata-snapshot-1.0-20050831.101112-1.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
-
- assertTrue( expectedFile.exists() );
-
- expectedFile.setLastModified( getPastDate().getTime() );
-
- File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- }
-
- public void testLegacyManagedRepoGetNotPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( proxiedRepository1.getBasedir(),
- "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar" );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- // TODO: timestamp preservation requires support for that in wagon
-// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
- }
-
- public void testLegacyManagedRepoGetAlreadyPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
- long originalModificationTime = expectedFile.lastModified();
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- File proxiedFile = new File( proxiedRepository1.getBasedir(),
- "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar" );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
- assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
- file.lastModified() );
- }
-
- public void testLegacyProxyRepoGetNotPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
-
- expectedFile.delete();
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile =
- new File( legacyProxiedRepository.getBasedir(), "org.apache.maven.test/jars/get-default-layout-1.0.jar" );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- // TODO: timestamp preservation requires support for that in wagon
-// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
- }
-
- public void testLegacyProxyRepoGetAlreadyPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
- File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
- long originalModificationTime = expectedFile.lastModified();
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- File proxiedFile = new File( legacyProxiedRepository.getBasedir(),
- "org.apache.maven.test/jars/get-default-layout-present-1.0.jar" );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
- assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
- file.lastModified() );
- }
-
- public void testLegacyManagedAndProxyRepoGetNotPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
-
- assertFalse( expectedFile.exists() );
-
- File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( proxiedFile );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- // TODO: timestamp preservation requires support for that in wagon
-// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
- }
-
- public void testLegacyManagedAndProxyRepoGetAlreadyPresent()
- throws IOException, ResourceDoesNotExistException, ProxyException
- {
- String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
- File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
- String expectedContents = FileUtils.fileRead( expectedFile );
- long originalModificationTime = expectedFile.lastModified();
-
- assertTrue( expectedFile.exists() );
-
- File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );
-
- assertEquals( "Check file matches", expectedFile, file );
- assertTrue( "Check file created", file.exists() );
- assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
- File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
- String unexpectedContents = FileUtils.fileRead( proxiedFile );
- assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
- assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
- assertEquals( "Check file timestamp is that of original managed file", originalModificationTime,
- file.lastModified() );
- }
-
- private static Versioning getVersioning( List versions )
- {
- Versioning versioning = new Versioning();
- for ( Iterator i = versions.iterator(); i.hasNext(); )
- {
- String v = (String) i.next();
- versioning.addVersion( v );
- }
- return versioning;
- }
-
- private static String getExpectedMetadata( String artifactId, Versioning versioning )
- throws IOException
- {
- return getExpectedMetadata( artifactId, null, versioning );
- }
-
- private static String getExpectedMetadata( String artifactId, String version, Versioning versioning )
- throws IOException
- {
- StringWriter expectedContents = new StringWriter();
-
- Metadata m = new Metadata();
- m.setGroupId( "org.apache.maven.test" );
- m.setArtifactId( artifactId );
- m.setVersion( version );
- m.setVersioning( versioning );
- m.setModelEncoding( null );
-
- new MetadataXpp3Writer().write( expectedContents, m );
- return expectedContents.toString();
- }
-
- private static String getExpectedMetadata( String artifactId, String version )
- throws IOException
- {
- return getExpectedMetadata( artifactId, version, null );
- }
-
- private static Versioning getVersioning( String timestamp, int buildNumber )
- {
- Versioning versioning = new Versioning();
- versioning.setSnapshot( new Snapshot() );
- versioning.getSnapshot().setTimestamp( timestamp );
- versioning.getSnapshot().setBuildNumber( buildNumber );
- return versioning;
- }
-
- private static Date getPastDate()
- throws ParseException
- {
- return new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" );
- }
-
- private static Date getFutureDate()
- {
- Calendar cal = Calendar.getInstance();
- cal.add( Calendar.YEAR, 1 );
- return cal.getTime();
- }
-
- private void mockFailedChecksums( String path, File expectedFile )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- // must do it twice as it will re-attempt it
- wagonMock.get( path + ".sha1", new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMock.get( path + ".md5", new File( expectedFile.getParentFile(), expectedFile.getName() + ".md5.tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMock.get( path + ".sha1", new File( expectedFile.getParentFile(), expectedFile.getName() + ".sha1.tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
-
- wagonMock.get( path + ".md5", new File( expectedFile.getParentFile(), expectedFile.getName() + ".md5.tmp" ) );
- wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
- }
-
- private File getChecksumFile( File file, String algorithm )
- {
- return new File( file.getParentFile(), file.getName() + "." + algorithm );
- }
-
- /**
- * A faster recursive copy that omits .svn directories.
- *
- * @param sourceDirectory the source directory to copy
- * @param destDirectory the target location
- * @throws java.io.IOException if there is a copying problem
- * @todo get back into plexus-utils, share with converter module
- */
- private static void copyDirectoryStructure( File sourceDirectory, File destDirectory )
- throws IOException
- {
- if ( !sourceDirectory.exists() )
- {
- throw new IOException( "Source directory doesn't exists (" + sourceDirectory.getAbsolutePath() + ")." );
- }
-
- File[] files = sourceDirectory.listFiles();
-
- String sourcePath = sourceDirectory.getAbsolutePath();
-
- for ( int i = 0; i < files.length; i++ )
- {
- File file = files[i];
-
- String dest = file.getAbsolutePath();
-
- dest = dest.substring( sourcePath.length() + 1 );
-
- File destination = new File( destDirectory, dest );
-
- if ( file.isFile() )
- {
- destination = destination.getParentFile();
-
- FileUtils.copyFileToDirectory( file, destination );
- }
- else if ( file.isDirectory() )
- {
- if ( !".svn".equals( file.getName() ) )
- {
- if ( !destination.exists() && !destination.mkdirs() )
- {
- throw new IOException(
- "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
- }
-
- copyDirectoryStructure( file, destination );
- }
- }
- else
- {
- throw new IOException( "Unknown file type: " + file.getAbsolutePath() );
- }
- }
- }
-
- private static ProxiedArtifactRepository createProxiedRepository( ArtifactRepository repository )
- {
- ProxiedArtifactRepository proxiedArtifactRepository = new ProxiedArtifactRepository( repository );
- proxiedArtifactRepository.setName( repository.getId() );
- proxiedArtifactRepository.setCacheFailures( true );
- return proxiedArtifactRepository;
- }
-
- private static ProxiedArtifactRepository createHardFailProxiedRepository( ArtifactRepository repository )
- {
- ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( repository );
- proxiedArtifactRepository.setHardFail( true );
- return proxiedArtifactRepository;
- }
-
- private ArtifactRepository createRepository( String id, File repoLocation )
- throws MalformedURLException
- {
- return createRepository( id, repoLocation.toURI().toURL().toExternalForm() );
- }
-
- private ArtifactRepository createRepository( String id, File location, ArtifactRepositoryLayout layout )
- throws MalformedURLException
- {
- return createRepository( id, location.toURI().toURL().toExternalForm(), layout );
- }
-
- private ArtifactRepository createRepository( String id, String url )
- {
- return createRepository( id, url, defaultLayout );
- }
-
- private ArtifactRepository createRepository( String id, String url, ArtifactRepositoryLayout repositoryLayout )
- {
- return factory.createArtifactRepository( id, url, repositoryLayout, null, null );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.proxy;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.wagon.ConnectionException;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authentication.AuthenticationException;
-import org.apache.maven.wagon.authentication.AuthenticationInfo;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.apache.maven.wagon.events.SessionListener;
-import org.apache.maven.wagon.events.TransferListener;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.apache.maven.wagon.repository.Repository;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * A dummy wagon implementation
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class WagonDelegate
- implements Wagon
-{
- private Wagon delegate;
-
- private String contentToGet;
-
- public void get( String resourceName, File destination )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- delegate.get( resourceName, destination );
- create( destination );
- }
-
- public boolean getIfNewer( String resourceName, File destination, long timestamp )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- boolean result = delegate.getIfNewer( resourceName, destination, timestamp );
- createIfMissing( destination );
- return result;
- }
-
- public void put( File source, String destination )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- delegate.put( source, destination );
- }
-
- public void putDirectory( File sourceDirectory, String destinationDirectory )
- throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
- {
- delegate.putDirectory( sourceDirectory, destinationDirectory );
- }
-
- public boolean supportsDirectoryCopy()
- {
- return delegate.supportsDirectoryCopy();
- }
-
- public Repository getRepository()
- {
- return delegate.getRepository();
- }
-
- public void connect( Repository source )
- throws ConnectionException, AuthenticationException
- {
- delegate.connect( source );
- }
-
- public void connect( Repository source, ProxyInfo proxyInfo )
- throws ConnectionException, AuthenticationException
- {
- delegate.connect( source, proxyInfo );
- }
-
- public void connect( Repository source, AuthenticationInfo authenticationInfo )
- throws ConnectionException, AuthenticationException
- {
- delegate.connect( source, authenticationInfo );
- }
-
- public void connect( Repository source, AuthenticationInfo authenticationInfo, ProxyInfo proxyInfo )
- throws ConnectionException, AuthenticationException
- {
- delegate.connect( source, authenticationInfo, proxyInfo );
- }
-
- public void openConnection()
- throws ConnectionException, AuthenticationException
- {
- delegate.openConnection();
- }
-
- public void disconnect()
- throws ConnectionException
- {
- delegate.disconnect();
- }
-
- public void addSessionListener( SessionListener listener )
- {
- delegate.addSessionListener( listener );
- }
-
- public void removeSessionListener( SessionListener listener )
- {
- delegate.removeSessionListener( listener );
- }
-
- public boolean hasSessionListener( SessionListener listener )
- {
- return delegate.hasSessionListener( listener );
- }
-
- public void addTransferListener( TransferListener listener )
- {
- delegate.addTransferListener( listener );
- }
-
- public void removeTransferListener( TransferListener listener )
- {
- delegate.removeTransferListener( listener );
- }
-
- public boolean hasTransferListener( TransferListener listener )
- {
- return delegate.hasTransferListener( listener );
- }
-
- public boolean isInteractive()
- {
- return delegate.isInteractive();
- }
-
- public void setInteractive( boolean interactive )
- {
- delegate.setInteractive( interactive );
- }
-
- public void setDelegate( Wagon delegate )
- {
- this.delegate = delegate;
- }
-
- void setContentToGet( String content )
- {
- contentToGet = content;
- }
-
- private void createIfMissing( File destination )
- {
- // since the mock won't actually copy a file, create an empty one to simulate file existence
- if ( !destination.exists() )
- {
- create( destination );
- }
- }
-
- private void create( File destination )
- {
- try
- {
- destination.getParentFile().mkdirs();
- if ( contentToGet == null )
- {
- destination.createNewFile();
- }
- else
- {
- FileUtils.fileWrite( destination.getAbsolutePath(), contentToGet );
- }
- }
- catch ( IOException e )
- {
- throw new RuntimeException( e.getMessage(), e );
- }
- }
-}
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.wagon.Wagon</role>
+ <role-hint>test</role-hint>
+ <implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
+ </component>
+ <component>
+ <role>org.codehaus.plexus.logging.LoggerManager</role>
+ <implementation>org.codehaus.plexus.logging.console.ConsoleLoggerManager</implementation>
+ <lifecycle-handler>basic</lifecycle-handler>
+
+ <configuration>
+ <threshold>ERROR</threshold>
+ </configuration>
+ </component>
+ </components>
+</component-set>
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<component-set>
- <components>
- <component>
- <role>org.apache.maven.wagon.Wagon</role>
- <role-hint>test</role-hint>
- <implementation>org.apache.maven.repository.proxy.WagonDelegate</implementation>
- </component>
- <component>
- <role>org.codehaus.plexus.logging.LoggerManager</role>
- <implementation>org.codehaus.plexus.logging.console.ConsoleLoggerManager</implementation>
- <lifecycle-handler>basic</lifecycle-handler>
-
- <configuration>
- <threshold>ERROR</threshold>
- </configuration>
- </component>
- </components>
-</component-set>
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ *
+ */
+public abstract class AbstractRepositoryQueryLayer
+ implements RepositoryQueryLayer
+{
+ protected ArtifactRepository repository;
+
+ public boolean containsArtifact( Artifact artifact )
+ {
+ File f = new File( repository.getBasedir(), repository.pathOf( artifact ) );
+ return f.exists();
+ }
+
+ public boolean containsArtifact( Artifact artifact, Snapshot snapshot )
+ {
+ String artifactPath = getSnapshotArtifactRepositoryPath( artifact, snapshot );
+ File artifactFile = new File( artifactPath );
+ return artifactFile.exists();
+ }
+
+ public List getVersions( Artifact artifact )
+ throws RepositoryQueryLayerException
+ {
+ Metadata metadata = getMetadata( artifact );
+
+ return metadata.getVersioning().getVersions();
+ }
+
+ protected String getSnapshotArtifactRepositoryPath( Artifact artifact, Snapshot snapshot )
+ {
+ File f = new File( repository.getBasedir(), repository.pathOf( artifact ) );
+ String snapshotInfo = artifact.getVersion().replaceFirst( "SNAPSHOT", snapshot.getTimestamp() + "-" +
+ snapshot.getBuildNumber() + ".pom" );
+ File snapshotFile = new File( f.getParentFile(), artifact.getArtifactId() + "-" + snapshotInfo );
+ return snapshotFile.getAbsolutePath();
+ }
+
+ protected Metadata getMetadata( Artifact artifact )
+ throws RepositoryQueryLayerException
+ {
+ Metadata metadata;
+
+ ArtifactRepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact );
+ String path = repository.pathOfRemoteRepositoryMetadata( repositoryMetadata );
+ File metadataFile = new File( repository.getBasedir(), path );
+ if ( metadataFile.exists() )
+ {
+ MetadataXpp3Reader reader = new MetadataXpp3Reader();
+ try
+ {
+ metadata = reader.read( new FileReader( metadataFile ) );
+ }
+ catch ( FileNotFoundException e )
+ {
+ throw new RepositoryQueryLayerException( "Error occurred while attempting to read metadata file", e );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositoryQueryLayerException( "Error occurred while attempting to read metadata file", e );
+ }
+ catch ( XmlPullParserException e )
+ {
+ throw new RepositoryQueryLayerException( "Error occurred while attempting to read metadata file", e );
+ }
+ }
+ else
+ {
+ throw new RepositoryQueryLayerException( "Metadata not found: " + metadataFile.getAbsolutePath() );
+ }
+
+ return metadata;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+
+/**
+ * This interface will be called by the main system for each artifact as it is discovered. This is how each of the
+ * different types of reports are implemented.
+ */
public interface ArtifactReportProcessor
{
    /** Plexus component role, used to look up implementations of this interface. */
    String ROLE = ArtifactReportProcessor.class.getName();

    /**
     * Process a single discovered artifact, recording successes, warnings and failures
     * on the given reporter.
     *
     * @param model      the POM model for the artifact
     * @param artifact   the artifact being processed
     * @param reporter   sink that accumulates the processing results
     * @param repository the repository in which the artifact was discovered
     * @throws ReportProcessorException if processing could not be completed
     */
    void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter, ArtifactRepository repository )
        throws ReportProcessorException;

}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.Iterator;
+
+/**
+ * This interface is used by the single artifact processor.
+ * <p/>
+ * The initial implementation of this will just need to be a mock implementation in src/test/java, used to track the
+ * failures and successes for checking assertions. Later, implementations will be made to present reports on the
+ * web interface, send them via mail, and so on.
+ *
+ * @todo i18n
+ */
public interface ArtifactReporter
{
    /** Plexus component role, used to look up implementations of this interface. */
    String ROLE = ArtifactReporter.class.getName();

    // Standard failure reasons recorded by report processors.
    // NOTE(review): the EMPTY_DEPENDENCY_* messages below duplicate the artifact
    // messages verbatim, so a report cannot distinguish the two cases — confirm intended.

    String NULL_MODEL = "Provided model was null";

    String NULL_ARTIFACT = "Provided artifact was null";

    String EMPTY_GROUP_ID = "Group id was empty or null";

    String EMPTY_ARTIFACT_ID = "Artifact id was empty or null";

    String EMPTY_VERSION = "Version was empty or null";

    String EMPTY_DEPENDENCY_GROUP_ID = "Group id was empty or null";

    String EMPTY_DEPENDENCY_ARTIFACT_ID = "Artifact id was empty or null";

    String EMPTY_DEPENDENCY_VERSION = "Version was empty or null";

    String NO_DEPENDENCIES = "Artifact has no dependencies";

    String ARTIFACT_NOT_FOUND = "Artifact does not exist in the repository";

    String DEPENDENCY_NOT_FOUND = "Artifact's dependency does not exist in the repository";

    // Result recording: artifacts and repository metadata are tracked separately.

    void addFailure( Artifact artifact, String reason );

    void addSuccess( Artifact artifact );

    void addWarning( Artifact artifact, String message );

    void addFailure( RepositoryMetadata metadata, String reason );

    void addSuccess( RepositoryMetadata metadata );

    void addWarning( RepositoryMetadata metadata, String message );

    // Iterators over the accumulated ArtifactResult / RepositoryMetadataResult entries.

    Iterator getArtifactFailureIterator();

    Iterator getArtifactSuccessIterator();

    Iterator getArtifactWarningIterator();

    Iterator getRepositoryMetadataFailureIterator();

    Iterator getRepositoryMetadataSuccessIterator();

    Iterator getRepositoryMetadataWarningIterator();

    // Aggregate counts across both artifact and metadata results.

    int getFailures();

    int getSuccesses();

    int getWarnings();
}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * A result of the report for a given artifact being processed.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @version $Id$
+ */
+public class ArtifactResult
+{
+ private final Artifact artifact;
+
+ private final String reason;
+
+ public ArtifactResult( Artifact artifact )
+ {
+ this.artifact = artifact;
+ this.reason = null;
+ }
+
+ public ArtifactResult( Artifact artifact, String reason )
+ {
+ this.artifact = artifact;
+ this.reason = reason;
+ }
+
+ public Artifact getArtifact()
+ {
+ return artifact;
+ }
+
+ public String getReason()
+ {
+ return reason;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.Plugin;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This class will report on bad metadata files. These include invalid version declarations and incomplete version
+ * information inside the metadata file. Plugin metadata will be checked for validity of the latest plugin artifacts.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.MetadataReportProcessor" role-hint="bad-metadata"
+ */
+public class BadMetadataReportProcessor
+ implements MetadataReportProcessor
+{
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactFactory artifactFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
+
+ /**
+ * Process the metadata encountered in the repository and report all errors found, if any.
+ *
+ * @param metadata the metadata to be processed.
+ * @param repository the repository where the metadata was encountered
+ * @param reporter the ArtifactReporter to receive processing results
+ * @throws ReportProcessorException if an error was occurred while processing the metadata
+ */
+ public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
+ throws ReportProcessorException
+ {
+ boolean hasFailures = false;
+
+ if ( metadata.storedInGroupDirectory() )
+ {
+ try
+ {
+ hasFailures = checkPluginMetadata( metadata, repository, reporter );
+ }
+ catch ( IOException e )
+ {
+ throw new ReportProcessorException( "Error getting plugin artifact directories versions", e );
+ }
+ }
+ else
+ {
+ String lastUpdated = metadata.getMetadata().getVersioning().getLastUpdated();
+ if ( lastUpdated == null || lastUpdated.length() == 0 )
+ {
+ reporter.addFailure( metadata, "Missing lastUpdated element inside the metadata." );
+ hasFailures = true;
+ }
+
+ if ( metadata.storedInArtifactVersionDirectory() )
+ {
+ hasFailures |= checkSnapshotMetadata( metadata, repository, reporter );
+ }
+ else
+ {
+ if ( !checkMetadataVersions( metadata, repository, reporter ) )
+ {
+ hasFailures = true;
+ }
+
+ try
+ {
+ if ( checkRepositoryVersions( metadata, repository, reporter ) )
+ {
+ hasFailures = true;
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new ReportProcessorException( "Error getting versions", e );
+ }
+ }
+ }
+
+ if ( !hasFailures )
+ {
+ reporter.addSuccess( metadata );
+ }
+ }
+
+ /**
+ * Method for processing a GroupRepositoryMetadata
+ *
+ * @param metadata the metadata to be processed.
+ * @param repository the repository where the metadata was encountered
+ * @param reporter the ArtifactReporter to receive processing results
+ */
+ private boolean checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+ ArtifactReporter reporter )
+ throws IOException
+ {
+ boolean hasFailures = false;
+
+ File metadataDir =
+ new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+ List pluginDirs = getArtifactIdFiles( metadataDir );
+
+ Map prefixes = new HashMap();
+ for ( Iterator plugins = metadata.getMetadata().getPlugins().iterator(); plugins.hasNext(); )
+ {
+ Plugin plugin = (Plugin) plugins.next();
+
+ String artifactId = plugin.getArtifactId();
+ if ( artifactId == null || artifactId.length() == 0 )
+ {
+ reporter.addFailure( metadata, "Missing or empty artifactId in group metadata." );
+ hasFailures = true;
+ }
+
+ String prefix = plugin.getPrefix();
+ if ( prefix == null || prefix.length() == 0 )
+ {
+ reporter.addFailure( metadata, "Missing or empty plugin prefix for artifactId " + artifactId + "." );
+ hasFailures = true;
+ }
+ else
+ {
+ if ( prefixes.containsKey( prefix ) )
+ {
+ reporter.addFailure( metadata, "Duplicate plugin prefix found: " + prefix + "." );
+ hasFailures = true;
+ }
+ else
+ {
+ prefixes.put( prefix, plugin );
+ }
+ }
+
+ if ( artifactId != null && artifactId.length() > 0 )
+ {
+ File pluginDir = new File( metadataDir, artifactId );
+ if ( !pluginDirs.contains( pluginDir ) )
+ {
+ reporter.addFailure( metadata, "Metadata plugin " + artifactId + " not found in the repository" );
+ hasFailures = true;
+ }
+ else
+ {
+ pluginDirs.remove( pluginDir );
+ }
+ }
+ }
+
+ if ( pluginDirs.size() > 0 )
+ {
+ for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
+ {
+ File plugin = (File) plugins.next();
+ reporter.addFailure( metadata, "Plugin " + plugin.getName() + " is present in the repository but " +
+ "missing in the metadata." );
+ }
+ hasFailures = true;
+ }
+
+ return hasFailures;
+ }
+
+ /**
+ * Method for processing a SnapshotArtifactRepository
+ *
+ * @param metadata the metadata to be processed.
+ * @param repository the repository where the metadata was encountered
+ * @param reporter the ArtifactReporter to receive processing results
+ */
+ private boolean checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
+ ArtifactReporter reporter )
+ {
+ RepositoryQueryLayer repositoryQueryLayer =
+ repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+
+ boolean hasFailures = false;
+
+ Snapshot snapshot = metadata.getMetadata().getVersioning().getSnapshot();
+ String timestamp = snapshot.getTimestamp();
+ String buildNumber = String.valueOf( snapshot.getBuildNumber() );
+
+ Artifact artifact = createArtifact( metadata );
+ if ( !repositoryQueryLayer.containsArtifact( artifact, snapshot ) )
+ {
+ reporter.addFailure( metadata, "Snapshot artifact " + timestamp + "-" + buildNumber + " does not exist." );
+ hasFailures = true;
+ }
+
+ return hasFailures;
+ }
+
+ /**
+ * Method for validating the versions declared inside an ArtifactRepositoryMetadata
+ *
+ * @param metadata the metadata to be processed.
+ * @param repository the repository where the metadata was encountered
+ * @param reporter the ArtifactReporter to receive processing results
+ */
+ private boolean checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
+ ArtifactReporter reporter )
+ {
+ RepositoryQueryLayer repositoryQueryLayer =
+ repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
+
+ boolean hasFailures = false;
+ Versioning versioning = metadata.getMetadata().getVersioning();
+ for ( Iterator versions = versioning.getVersions().iterator(); versions.hasNext(); )
+ {
+ String version = (String) versions.next();
+
+ Artifact artifact = createArtifact( metadata, version );
+
+ if ( !repositoryQueryLayer.containsArtifact( artifact ) )
+ {
+ reporter.addFailure( metadata, "Artifact version " + version + " is present in metadata but " +
+ "missing in the repository." );
+ hasFailures = true;
+ }
+ }
+ return hasFailures;
+ }
+
+ /**
+ * Searches the artifact repository directory for all versions and verifies that all of them are listed in the
+ * ArtifactRepositoryMetadata
+ *
+ * @param metadata the metadata to be processed.
+ * @param repository the repository where the metadata was encountered
+ * @param reporter the ArtifactReporter to receive processing results
+ */
+ private boolean checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
+ ArtifactReporter reporter )
+ throws IOException
+ {
+ boolean hasFailures = false;
+ Versioning versioning = metadata.getMetadata().getVersioning();
+ File versionsDir =
+ new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
+ List versions = FileUtils.getFileNames( versionsDir, "*/*.pom", null, false );
+ for ( Iterator i = versions.iterator(); i.hasNext(); )
+ {
+ File path = new File( (String) i.next() );
+ String version = path.getParentFile().getName();
+ if ( !versioning.getVersions().contains( version ) )
+ {
+ reporter.addFailure( metadata, "Artifact version " + version + " found in the repository but " +
+ "missing in the metadata." );
+ hasFailures = true;
+ }
+ }
+ return hasFailures;
+ }
+
+ /**
+ * Used to create an artifact object from a metadata base version
+ */
+ private Artifact createArtifact( RepositoryMetadata metadata )
+ {
+ return artifactFactory.createBuildArtifact( metadata.getGroupId(), metadata.getArtifactId(),
+ metadata.getBaseVersion(), "pom" );
+ }
+
+ /**
+ * Used to create an artifact object with a specified version
+ */
+ private Artifact createArtifact( RepositoryMetadata metadata, String version )
+ {
+ return artifactFactory.createBuildArtifact( metadata.getGroupId(), metadata.getArtifactId(), version, "pom" );
+ }
+
+ /**
+ * Used to gather artifactIds from a groupId directory
+ */
+ private List getArtifactIdFiles( File groupIdDir )
+ throws IOException
+ {
+ List artifactIdFiles = new ArrayList();
+
+ List fileArray = new ArrayList( Arrays.asList( groupIdDir.listFiles() ) );
+ for ( Iterator files = fileArray.iterator(); files.hasNext(); )
+ {
+ File artifactDir = (File) files.next();
+
+ if ( artifactDir.isDirectory() )
+ {
+ List versions = FileUtils.getFileNames( artifactDir, "*/*.pom", null, false );
+ if ( versions.size() > 0 )
+ {
+ artifactIdFiles.add( artifactDir );
+ }
+ }
+ }
+
+ return artifactIdFiles;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Class to implement caching.
+ */
public class Cache
{
    // Backing store; LinkedHashMap keeps insertion order so the eldest entry is first.
    // All access is guarded by synchronizing on this map.
    private final Map cache;

    // Hit ratio at or above which the eldest entry is trimmed to conserve memory.
    private final double cacheHitRatio;

    // Maximum number of entries, or 0 for no size limit.
    private final int cacheMaxSize;

    private long cacheHits;

    private long cacheMiss;

    /**
     * Caches all data and expires only the oldest data when the specified cache hit rate is reached.
     */
    public Cache( double cacheHitRatio )
    {
        this( cacheHitRatio, 0 );
    }

    /**
     * Caches all data and expires only the oldest data when the maximum cache size is reached.
     */
    public Cache( int cacheMaxSize )
    {
        this( (double) 1, cacheMaxSize );
    }

    /**
     * Caches all data and expires only the oldest data when either the specified cache hit rate is reached
     * or the maximum cache size is reached.
     */
    public Cache( double cacheHitRatio, int cacheMaxSize )
    {
        this.cacheHitRatio = cacheHitRatio;
        this.cacheMaxSize = cacheMaxSize;

        if ( cacheMaxSize > 0 )
        {
            cache = new LinkedHashMap( cacheMaxSize );
        }
        else
        {
            cache = new LinkedHashMap();
        }
    }

    /**
     * Check if the specified key is already mapped to an object.
     * Counts as a hit or miss for the hit-rate statistics.
     *
     * @param key the key used to map the cached object
     * @return true if the cache contains an object associated with the given key
     */
    public boolean containsKey( Object key )
    {
        boolean contains;
        synchronized ( cache )
        {
            contains = cache.containsKey( key );

            if ( contains )
            {
                cacheHits++;
            }
            else
            {
                cacheMiss++;
            }
        }

        return contains;
    }

    /**
     * Check for a cached object and return it if it exists. Returns null when the keyed object is not found.
     *
     * @param key the key used to map the cached object
     * @return the object mapped to the given key, or null if no cache object is mapped to the given key
     */
    public Object get( Object key )
    {
        Object retValue = null;

        synchronized ( cache )
        {
            if ( cache.containsKey( key ) )
            {
                // remove and put: this promotes it to the top since we use a linked hash map
                retValue = cache.remove( key );

                cache.put( key, retValue );

                cacheHits++;
            }
            else
            {
                cacheMiss++;
            }
        }

        return retValue;
    }

    /**
     * Cache the given value and map it using the given key.
     *
     * @param key   the object to map the valued object
     * @param value the object to cache
     */
    public void put( Object key, Object value )
    {
        // remove and put: this promotes it to the top since we use a linked hash map
        synchronized ( cache )
        {
            if ( cache.containsKey( key ) )
            {
                cache.remove( key );
            }

            cache.put( key, value );
        }

        manageCache();
    }

    /**
     * Compute the efficiency of this cache.
     *
     * @return the ratio of cache hits to the total number of queries for cache objects
     */
    public double getHitRate()
    {
        synchronized ( cache )
        {
            return cacheHits == 0 && cacheMiss == 0 ? 0 : (double) cacheHits / (double) ( cacheHits + cacheMiss );
        }
    }

    /**
     * Get the total number of cache objects currently cached.
     */
    public int size()
    {
        // BUGFIX: synchronize for a consistent view; every other accessor locks on the map,
        // so an unsynchronized read here could observe a stale or mid-update size.
        synchronized ( cache )
        {
            return cache.size();
        }
    }

    /**
     * Empty the cache and reset the cache hit rate.
     */
    public void clear()
    {
        synchronized ( cache )
        {
            cacheHits = 0;
            cacheMiss = 0;
            cache.clear();
        }
    }

    /**
     * Trim the cache after an insertion: drop the eldest entry when the size limit is
     * exceeded, or when the desired hit ratio has been reached (memory can be conserved).
     */
    private void manageCache()
    {
        synchronized ( cache )
        {
            if ( cache.isEmpty() )
            {
                // defensive: nothing to trim (put() always leaves at least one entry)
                return;
            }

            Iterator iterator = cache.entrySet().iterator();
            if ( cacheMaxSize == 0 )
            {
                // desired HitRatio is reached, we can trim the cache to conserve memory
                if ( cacheHitRatio <= getHitRate() )
                {
                    iterator.next();
                    iterator.remove();
                }
            }
            else if ( cache.size() > cacheMaxSize )
            {
                // maximum cache size is reached
                while ( cache.size() > cacheMaxSize )
                {
                    iterator.next();
                    iterator.remove();
                }
            }
            else
            {
                // even though the max has not been reached, the desired HitRatio is already reached,
                // so we can trim the cache to conserve memory
                if ( cacheHitRatio <= getHitRate() )
                {
                    iterator.next();
                    iterator.remove();
                }
            }
        }
    }

}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.Metadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+
+
+/**
+ *
+ */
+public class CachedRepositoryQueryLayer
+ extends AbstractRepositoryQueryLayer
+{
+ private Cache cache;
+
+ public static final double CACHE_HIT_RATIO = 0.5;
+
+ public CachedRepositoryQueryLayer( ArtifactRepository repository )
+ {
+ this.repository = repository;
+
+ cache = new Cache( CACHE_HIT_RATIO );
+ }
+
+ public double getCacheHitRate()
+ {
+ return cache.getHitRate();
+ }
+
+ public boolean containsArtifact( Artifact artifact )
+ {
+ boolean artifactFound = true;
+
+ String artifactPath = repository.getBasedir() + "/" + repository.pathOf( artifact );
+
+ if ( cache.get( artifactPath ) == null )
+ {
+ artifactFound = super.containsArtifact( artifact );
+ if ( artifactFound )
+ {
+ cache.put( artifactPath, artifactPath );
+ }
+ }
+
+ return artifactFound;
+ }
+
+ public boolean containsArtifact( Artifact artifact, Snapshot snapshot )
+ {
+ boolean artifactFound = true;
+
+ String path = getSnapshotArtifactRepositoryPath( artifact, snapshot );
+
+ if ( cache.get( path ) == null )
+ {
+ artifactFound = super.containsArtifact( artifact, snapshot );
+ if ( artifactFound )
+ {
+ cache.put( path, path );
+ }
+ }
+
+ return artifactFound;
+ }
+
+ /**
+ * Override method to utilize the cache
+ */
+ protected Metadata getMetadata( Artifact artifact )
+ throws RepositoryQueryLayerException
+ {
+ Metadata metadata = (Metadata) cache.get( artifact.getId() );
+
+ if ( metadata == null )
+ {
+ metadata = super.getMetadata( artifact );
+ cache.put( artifact.getId(), metadata );
+ }
+
+ return metadata;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.digest.Digester;
+import org.apache.maven.archiva.digest.DigesterException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * This class reports invalid and mismatched checksums of artifacts and metadata files.
+ * It validates MD5 and SHA-1 checksums.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="checksum"
+ */
+public class ChecksumArtifactReporter
+ implements ArtifactReportProcessor
+{
+ /**
+ * @plexus.requirement role-hint="sha1"
+ */
+ private Digester sha1Digester;
+
+ /**
+ * @plexus.requirement role-hint="md5"
+ */
+ private Digester md5Digester;
+
+ /**
+ * Validate the checksum of the specified artifact.
+ *
+ * @param model
+ * @param artifact
+ * @param reporter
+ * @param repository
+ */
+ public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
+ ArtifactRepository repository )
+ {
+ if ( !"file".equals( repository.getProtocol() ) )
+ {
+ // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+ throw new UnsupportedOperationException(
+ "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ }
+
+ //check if checksum files exist
+ String path = repository.pathOf( artifact );
+ File file = new File( repository.getBasedir(), path );
+
+ verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
+ verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
+ }
+
+ private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
+ ArtifactReporter reporter, Artifact artifact )
+ {
+ File checksumFile = new File( repository.getBasedir(), path );
+ if ( checksumFile.exists() )
+ {
+ try
+ {
+ digester.verify( file, FileUtils.fileRead( checksumFile ) );
+
+ reporter.addSuccess( artifact );
+ }
+ catch ( DigesterException e )
+ {
+ reporter.addFailure( artifact, e.getMessage() );
+ }
+ catch ( IOException e )
+ {
+ reporter.addFailure( artifact, "Read file error: " + e.getMessage() );
+ }
+ }
+ else
+ {
+ reporter.addFailure( artifact, digester.getAlgorithm() + " checksum file does not exist." );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.digest.Digester;
+import org.apache.maven.archiva.digest.DigesterException;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * This class reports invalid and mismatched checksums of artifacts and metadata files.
+ * It validates MD5 and SHA-1 checksums.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.MetadataReportProcessor" role-hint="checksum-metadata"
+ */
+public class ChecksumMetadataReporter
+ implements MetadataReportProcessor
+{
+ /**
+ * @plexus.requirement role-hint="sha1"
+ */
+ private Digester sha1Digester;
+
+ /**
+ * @plexus.requirement role-hint="md5"
+ */
+ private Digester md5Digester;
+
+ /**
+ * Validate the checksums of the metadata. Get the metadata file from the
+ * repository then validate the checksum.
+ */
+ public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
+ {
+ if ( !"file".equals( repository.getProtocol() ) )
+ {
+ // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+ throw new UnsupportedOperationException(
+ "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ }
+
+ //check if checksum files exist
+ String path = repository.pathOfRemoteRepositoryMetadata( metadata );
+ File file = new File( repository.getBasedir(), path );
+
+ verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
+ verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );
+
+ }
+
+ private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
+ ArtifactReporter reporter, RepositoryMetadata metadata )
+ {
+ File checksumFile = new File( repository.getBasedir(), path );
+ if ( checksumFile.exists() )
+ {
+ try
+ {
+ digester.verify( file, FileUtils.fileRead( checksumFile ) );
+
+ reporter.addSuccess( metadata );
+ }
+ catch ( DigesterException e )
+ {
+ reporter.addFailure( metadata, e.getMessage() );
+ }
+ catch ( IOException e )
+ {
+ reporter.addFailure( metadata, "Read file error: " + e.getMessage() );
+ }
+ }
+ else
+ {
+ reporter.addFailure( metadata, digester.getAlgorithm() + " checksum file does not exist." );
+ }
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Dependency;
+import org.apache.maven.model.Model;
+
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Default artifact report processor: checks that an artifact and each dependency
+ * declared in its model have a non-empty groupId, artifactId and version, and that
+ * they exist in the repository, recording one success or failure per item.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="default"
+ */
+public class DefaultArtifactReportProcessor
+ implements ArtifactReportProcessor
+{
+ private static final String EMPTY_STRING = "";
+
+ // plexus components
+ private ArtifactFactory artifactFactory;
+
+ private RepositoryQueryLayer repositoryQueryLayer;
+
+ /**
+ * Validate the artifact itself, then the dependencies of the given model.
+ * A null artifact or a null model is recorded as a failure rather than thrown.
+ *
+ * @param model the POM model whose dependencies are validated (may be null)
+ * @param artifact the artifact to validate (may be null)
+ * @param reporter receives a success or failure for every item checked
+ * @param repository the source repository (not used by this implementation)
+ */
+ public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
+ ArtifactRepository repository )
+ {
+ if ( artifact == null )
+ {
+ // NOTE(review): the failure is registered against the null artifact itself
+ reporter.addFailure( artifact, ArtifactReporter.NULL_ARTIFACT );
+ }
+ else
+ {
+ processArtifact( artifact, reporter );
+ }
+
+ if ( model == null )
+ {
+ reporter.addFailure( artifact, ArtifactReporter.NULL_MODEL );
+ }
+ else
+ {
+ List dependencies = model.getDependencies();
+ processDependencies( dependencies, reporter );
+ }
+ }
+
+ // Checks the artifact's own coordinates; the repository lookup only happens
+ // when all three coordinate checks passed.
+ private void processArtifact( Artifact artifact, ArtifactReporter reporter )
+ {
+ boolean hasFailed = false;
+ if ( EMPTY_STRING.equals( artifact.getGroupId() ) || artifact.getGroupId() == null )
+ {
+ reporter.addFailure( artifact, ArtifactReporter.EMPTY_GROUP_ID );
+ hasFailed = true;
+ }
+ if ( EMPTY_STRING.equals( artifact.getArtifactId() ) || artifact.getArtifactId() == null )
+ {
+ reporter.addFailure( artifact, ArtifactReporter.EMPTY_ARTIFACT_ID );
+ hasFailed = true;
+ }
+ if ( EMPTY_STRING.equals( artifact.getVersion() ) || artifact.getVersion() == null )
+ {
+ reporter.addFailure( artifact, ArtifactReporter.EMPTY_VERSION );
+ hasFailed = true;
+ }
+ if ( !hasFailed )
+ {
+ if ( repositoryQueryLayer.containsArtifact( artifact ) )
+ {
+ reporter.addSuccess( artifact );
+ }
+ else
+ {
+ reporter.addFailure( artifact, ArtifactReporter.ARTIFACT_NOT_FOUND );
+ }
+ }
+ }
+
+ // Validates every dependency's coordinates and, when they are complete, its
+ // presence in the repository (looked up as a "pom" artifact, see createArtifact).
+ private void processDependencies( List dependencies, ArtifactReporter reporter )
+ {
+ if ( dependencies.size() > 0 )
+ {
+ Iterator iterator = dependencies.iterator();
+ while ( iterator.hasNext() )
+ {
+ boolean hasFailed = false;
+ Dependency dependency = (Dependency) iterator.next();
+ Artifact artifact = createArtifact( dependency );
+ if ( EMPTY_STRING.equals( dependency.getGroupId() ) || dependency.getGroupId() == null )
+ {
+ reporter.addFailure( artifact, ArtifactReporter.EMPTY_DEPENDENCY_GROUP_ID );
+ hasFailed = true;
+ }
+ if ( EMPTY_STRING.equals( dependency.getArtifactId() ) || dependency.getArtifactId() == null )
+ {
+ reporter.addFailure( artifact, ArtifactReporter.EMPTY_DEPENDENCY_ARTIFACT_ID );
+ hasFailed = true;
+ }
+ if ( EMPTY_STRING.equals( dependency.getVersion() ) || dependency.getVersion() == null )
+ {
+ reporter.addFailure( artifact, ArtifactReporter.EMPTY_DEPENDENCY_VERSION );
+ hasFailed = true;
+ }
+ if ( !hasFailed )
+ {
+ if ( repositoryQueryLayer.containsArtifact( artifact ) )
+ {
+ reporter.addSuccess( artifact );
+ }
+ else
+ {
+ reporter.addFailure( artifact, ArtifactReporter.DEPENDENCY_NOT_FOUND );
+ }
+ }
+ }
+ }
+
+ }
+
+ /**
+ * Only used for passing a mock object when unit testing
+ *
+ * @param repositoryQueryLayer
+ */
+ protected void setRepositoryQueryLayer( RepositoryQueryLayer repositoryQueryLayer )
+ {
+ this.repositoryQueryLayer = repositoryQueryLayer;
+ }
+
+ /**
+ * Only used for passing a mock object when unit testing
+ *
+ * @param artifactFactory
+ */
+ protected void setArtifactFactory( ArtifactFactory artifactFactory )
+ {
+ this.artifactFactory = artifactFactory;
+ }
+
+ // NOTE(review): the dependency is looked up as type "pom" regardless of its
+ // declared type -- confirm this is intended for the containment check.
+ private Artifact createArtifact( Dependency dependency )
+ {
+ return artifactFactory.createBuildArtifact( dependency.getGroupId(), dependency.getArtifactId(),
+ dependency.getVersion(), "pom" );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Default ArtifactReporter: accumulates results in memory and exposes them
+ * through iterators and simple counters.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReporter" role-hint="default"
+ */
+public class DefaultArtifactReporter
+    implements ArtifactReporter
+{
+    // results recorded against artifacts
+    private final List failedArtifacts = new ArrayList();
+
+    private final List succeededArtifacts = new ArrayList();
+
+    private final List warnedArtifacts = new ArrayList();
+
+    // results recorded against repository metadata
+    private final List failedMetadata = new ArrayList();
+
+    private final List succeededMetadata = new ArrayList();
+
+    private final List warnedMetadata = new ArrayList();
+
+    public void addFailure( Artifact artifact, String reason )
+    {
+        failedArtifacts.add( new ArtifactResult( artifact, reason ) );
+    }
+
+    public void addSuccess( Artifact artifact )
+    {
+        succeededArtifacts.add( new ArtifactResult( artifact ) );
+    }
+
+    public void addWarning( Artifact artifact, String message )
+    {
+        warnedArtifacts.add( new ArtifactResult( artifact, message ) );
+    }
+
+    public void addFailure( RepositoryMetadata metadata, String reason )
+    {
+        failedMetadata.add( new RepositoryMetadataResult( metadata, reason ) );
+    }
+
+    public void addSuccess( RepositoryMetadata metadata )
+    {
+        succeededMetadata.add( new RepositoryMetadataResult( metadata ) );
+    }
+
+    public void addWarning( RepositoryMetadata metadata, String message )
+    {
+        warnedMetadata.add( new RepositoryMetadataResult( metadata, message ) );
+    }
+
+    public Iterator getArtifactFailureIterator()
+    {
+        return failedArtifacts.iterator();
+    }
+
+    public Iterator getArtifactSuccessIterator()
+    {
+        return succeededArtifacts.iterator();
+    }
+
+    public Iterator getArtifactWarningIterator()
+    {
+        return warnedArtifacts.iterator();
+    }
+
+    public Iterator getRepositoryMetadataFailureIterator()
+    {
+        return failedMetadata.iterator();
+    }
+
+    public Iterator getRepositoryMetadataSuccessIterator()
+    {
+        return succeededMetadata.iterator();
+    }
+
+    public Iterator getRepositoryMetadataWarningIterator()
+    {
+        return warnedMetadata.iterator();
+    }
+
+    /** Total failures across both artifacts and metadata. */
+    public int getFailures()
+    {
+        return failedArtifacts.size() + failedMetadata.size();
+    }
+
+    /** Total successes across both artifacts and metadata. */
+    public int getSuccesses()
+    {
+        return succeededArtifacts.size() + succeededMetadata.size();
+    }
+
+    /** Total warnings across both artifacts and metadata. */
+    public int getWarnings()
+    {
+        return warnedArtifacts.size() + warnedMetadata.size();
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+/**
+ * Repository query layer backed directly by a file based ArtifactRepository;
+ * all query behaviour is inherited from AbstractRepositoryQueryLayer.
+ */
+public class DefaultRepositoryQueryLayer
+ extends AbstractRepositoryQueryLayer
+{
+ /**
+ * @param repository the repository all queries are resolved against
+ */
+ public DefaultRepositoryQueryLayer( ArtifactRepository repository )
+ {
+ this.repository = repository;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+/**
+ * Gets the default implementation of a repository query layer for the given repository.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @version $Id$
+ * @plexus.component role="org.apache.maven.archiva.reporting.RepositoryQueryLayerFactory"
+ */
+public class DefaultRepositoryQueryLayerFactory
+ implements RepositoryQueryLayerFactory
+{
+ /**
+ * @param repository the repository to query
+ * @return a new DefaultRepositoryQueryLayer bound to the given repository
+ */
+ public RepositoryQueryLayer createRepositoryQueryLayer( ArtifactRepository repository )
+ {
+ return new DefaultRepositoryQueryLayer( repository );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.digest.Digester;
+import org.apache.maven.archiva.digest.DigesterException;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexing.lucene.LuceneQuery;
+import org.apache.maven.archiva.indexing.record.StandardArtifactIndexRecord;
+import org.apache.maven.archiva.indexing.record.StandardIndexRecordFields;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+
+import java.io.File;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Validates an artifact file for duplicates within the same groupId based from what's available in a repository index.
+ *
+ * @author Edwin Punzalan
+ * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="duplicate"
+ */
+public class DuplicateArtifactFileReportProcessor
+ implements ArtifactReportProcessor
+{
+ /**
+ * @plexus.requirement role-hint="md5"
+ */
+ private Digester digester;
+
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryArtifactIndexFactory indexFactory;
+
+ /**
+ * @plexus.configuration
+ */
+ private String indexDirectory;
+
+ /**
+ * Search the repository index for other artifacts with the same MD5 checksum
+ * as this artifact's file; report a failure for every match in the same
+ * groupId at a different repository path, otherwise a success.
+ *
+ * @param model the artifact's model (not used by this processor)
+ * @param artifact the artifact whose file is checksummed; a null file only yields a warning
+ * @param reporter receives the success/failure/warning results
+ * @param repository used to compute the artifact's canonical repository path
+ * @throws ReportProcessorException if checksumming or the index search fails
+ */
+ public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
+ ArtifactRepository repository )
+ throws ReportProcessorException
+ {
+ if ( artifact.getFile() != null )
+ {
+ RepositoryArtifactIndex index = indexFactory.createStandardIndex( new File( indexDirectory ) );
+
+ String checksum;
+ try
+ {
+ checksum = digester.calc( artifact.getFile() );
+ }
+ catch ( DigesterException e )
+ {
+ throw new ReportProcessorException( "Failed to generate checksum", e );
+ }
+
+ try
+ {
+ // MD5 values are stored lower-cased in the index, so normalise before querying
+ List results = index.search( new LuceneQuery(
+ new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
+
+ if ( results.isEmpty() )
+ {
+ reporter.addSuccess( artifact );
+ }
+ else
+ {
+ boolean hasDuplicates = false;
+ for ( Iterator i = results.iterator(); i.hasNext(); )
+ {
+ StandardArtifactIndexRecord result = (StandardArtifactIndexRecord) i.next();
+
+ //make sure it is not the same artifact
+ if ( !result.getFilename().equals( repository.pathOf( artifact ) ) )
+ {
+ //report only duplicates from the same groupId
+ String groupId = artifact.getGroupId();
+ if ( groupId.equals( result.getGroupId() ) )
+ {
+ hasDuplicates = true;
+ reporter.addFailure( artifact, "Found duplicate for " + artifact.getId() );
+ }
+ }
+ }
+
+ // every hit was either this artifact itself or from another groupId
+ if ( !hasDuplicates )
+ {
+ reporter.addSuccess( artifact );
+ }
+ }
+ }
+ catch ( RepositoryIndexSearchException e )
+ {
+ throw new ReportProcessorException( "Failed to search in index", e );
+ }
+ }
+ else
+ {
+ reporter.addWarning( artifact, "Artifact file is null" );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+
+/**
+ * This class validates well-formedness of pom xml file.
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="invalid-pom"
+ */
+public class InvalidPomArtifactReportProcessor
+ implements ArtifactReportProcessor
+{
+ /**
+ * Parse the artifact's pom file to verify it is well-formed XML;
+ * non-pom artifacts are skipped with a warning.
+ *
+ * @param model the model built for the artifact (not used by this processor)
+ * @param artifact The pom xml file to be validated, passed as an artifact object.
+ * @param reporter The artifact reporter object.
+ * @param repository the repository where the artifact is located; must be file based.
+ * @throws UnsupportedOperationException if the repository is not file based
+ */
+ public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
+ ArtifactRepository repository )
+ {
+ if ( !"file".equals( repository.getProtocol() ) )
+ {
+ // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+ throw new UnsupportedOperationException(
+ "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ }
+
+ if ( "pom".equals( artifact.getType().toLowerCase() ) )
+ {
+ File f = new File( repository.getBasedir(), repository.pathOf( artifact ) );
+
+ if ( !f.exists() )
+ {
+ reporter.addFailure( artifact, "Artifact not found." );
+ }
+ else
+ {
+ Reader reader = null;
+
+ MavenXpp3Reader pomReader = new MavenXpp3Reader();
+
+ try
+ {
+ // NOTE(review): FileReader uses the platform default encoding;
+ // poms are conventionally UTF-8 -- confirm this is acceptable here.
+ reader = new FileReader( f );
+ pomReader.read( reader );
+ reporter.addSuccess( artifact );
+ }
+ catch ( XmlPullParserException e )
+ {
+ reporter.addFailure( artifact, "The pom xml file is not well-formed. Error while parsing: " +
+ e.getMessage() );
+ }
+ catch ( FileNotFoundException e )
+ {
+ // file disappeared between the exists() check and opening it
+ reporter.addFailure( artifact, "Error while reading the pom xml file: " + e.getMessage() );
+ }
+ catch ( IOException e )
+ {
+ reporter.addFailure( artifact, "Error while reading the pom xml file: " + e.getMessage() );
+ }
+ finally
+ {
+ IOUtil.close( reader );
+ }
+ }
+ }
+ else
+ {
+ reporter.addWarning( artifact, "The artifact is not a pom xml file." );
+ }
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+
+/**
+ * Validate the location of the artifact based on the values indicated
+ * in its pom (both the pom packaged with the artifact & the pom in the
+ * file system).
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.ArtifactReportProcessor" role-hint="artifact-location"
+ */
+public class LocationArtifactReportProcessor
+ implements ArtifactReportProcessor
+{
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactFactory artifactFactory;
+
+ /**
+ * Check whether the artifact is in its proper location. The location of the artifact
+ * is validated first against the groupId, artifactId and versionId in the specified model
+ * object (pom in the file system). Then unpack the artifact (jar file) and get the model (pom)
+ * included in the package. If a model exists inside the package, then check if the artifact's
+ * location is valid based on the location specified in the pom. Check if the both the location
+ * specified in the file system pom and in the pom included in the package is the same.
+ *
+ * @param model Represents the pom in the file system.
+ * @param artifact the artifact whose repository location is being validated
+ * @param reporter receives one success, or one failure per mismatch found
+ * @param repository the file based repository the artifact lives in
+ * @throws ReportProcessorException if the packaged pom cannot be read from the artifact file
+ * @throws UnsupportedOperationException if the repository is not file based
+ */
+ public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
+ ArtifactRepository repository )
+ throws ReportProcessorException
+ {
+ if ( !"file".equals( repository.getProtocol() ) )
+ {
+ // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+ throw new UnsupportedOperationException(
+ "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+ }
+
+ //check if the artifact is located in its proper location based on the info
+ //specified in the model object/pom
+ Artifact modelArtifact = artifactFactory.createBuildArtifact( model.getGroupId(), model.getArtifactId(),
+ model.getVersion(), model.getPackaging() );
+
+ boolean failed = false;
+ String modelPath = repository.pathOf( modelArtifact );
+ String artifactPath = repository.pathOf( artifact );
+ if ( modelPath.equals( artifactPath ) )
+ {
+ //get the location of the artifact itself
+ File file = new File( repository.getBasedir(), artifactPath );
+
+ if ( file.exists() )
+ {
+ //unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
+ //check if the pom is included in the package
+ Model extractedModel = readArtifactModel( file, artifact.getGroupId(), artifact.getArtifactId() );
+
+ if ( extractedModel != null )
+ {
+ Artifact extractedArtifact = artifactFactory.createBuildArtifact( extractedModel.getGroupId(),
+ extractedModel.getArtifactId(),
+ extractedModel.getVersion(),
+ extractedModel.getPackaging() );
+ if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
+ {
+ reporter.addFailure( artifact,
+ "The artifact is out of place. It does not match the specified location in the packaged pom." );
+ failed = true;
+ }
+ }
+ // NOTE(review): an artifact without a packaged pom passes this check silently
+ }
+ else
+ {
+ reporter.addFailure( artifact,
+ "The artifact is out of place. It does not exist at the specified location in the repository pom." );
+ failed = true;
+ }
+ }
+ else
+ {
+ reporter.addFailure( artifact,
+ "The artifact is out of place. It does not match the specified location in the repository pom." );
+ failed = true;
+ }
+
+ if ( !failed )
+ {
+ reporter.addSuccess( artifact );
+ }
+ }
+
+ /**
+ * Read the pom packaged inside the artifact jar, if any.
+ *
+ * @param file the artifact (jar) file on disk
+ * @param groupId used to locate the META-INF/maven/groupId/artifactId/pom.xml entry
+ * @param artifactId used to locate the packaged pom entry
+ * @return the parsed model, or null when the jar contains no packaged pom
+ * @throws ReportProcessorException if the jar or the pom entry cannot be read
+ */
+ private Model readArtifactModel( File file, String groupId, String artifactId )
+ throws ReportProcessorException
+ {
+ Model model = null;
+
+ JarFile jar = null;
+ try
+ {
+ jar = new JarFile( file );
+
+ //Get the entry and its input stream.
+ JarEntry entry = jar.getJarEntry( "META-INF/maven/" + groupId + "/" + artifactId + "/pom.xml" );
+
+ // If the entry is not null, extract it.
+ if ( entry != null )
+ {
+ model = readModel( jar.getInputStream( entry ) );
+ }
+ }
+ catch ( IOException e )
+ {
+ // TODO: should just warn and continue?
+ throw new ReportProcessorException( "Unable to read artifact to extract model", e );
+ }
+ catch ( XmlPullParserException e )
+ {
+ // TODO: should just warn and continue?
+ throw new ReportProcessorException( "Unable to read artifact to extract model", e );
+ }
+ finally
+ {
+ if ( jar != null )
+ {
+ //noinspection UnusedCatchParameter
+ try
+ {
+ jar.close();
+ }
+ catch ( IOException e )
+ {
+ // ignore
+ }
+ }
+ }
+ return model;
+ }
+
+ // Parses a pom from the given stream, always closing the stream afterwards.
+ // NOTE(review): InputStreamReader uses the platform default charset here;
+ // poms are conventionally UTF-8 -- confirm.
+ private Model readModel( InputStream entryStream )
+ throws IOException, XmlPullParserException
+ {
+ Reader isReader = new InputStreamReader( entryStream );
+
+ Model model;
+ try
+ {
+ MavenXpp3Reader pomReader = new MavenXpp3Reader();
+ model = pomReader.read( isReader );
+ }
+ finally
+ {
+ IOUtil.close( isReader );
+ }
+ return model;
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+/**
+ * This interface is called by the main system for each piece of metadata as it is discovered.
+ */
+public interface MetadataReportProcessor
+{
+ /** Plexus role used to look up implementations of this processor. */
+ String ROLE = MetadataReportProcessor.class.getName();
+
+ /**
+ * Examine one piece of repository metadata and record results on the reporter.
+ *
+ * @param metadata the metadata being processed
+ * @param repository the repository the metadata was discovered in
+ * @param reporter receives success/failure/warning results
+ * @throws ReportProcessorException if processing fails outright
+ */
+ void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
+ throws ReportProcessorException;
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception occurring during reporting.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @version $Id$
+ */
+public class ReportProcessorException
+    extends Exception
+{
+    /**
+     * @param msg the detail message
+     * @param cause the underlying cause
+     */
+    public ReportProcessorException( String msg, Throwable cause )
+    {
+        super( msg, cause );
+    }
+
+    /**
+     * Convenience constructor for failures with no underlying cause,
+     * mirroring the constructors offered by RepositoryQueryLayerException.
+     *
+     * @param msg the detail message
+     */
+    public ReportProcessorException( String msg )
+    {
+        super( msg );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+/**
+ * Immutable result of processing one piece of repository metadata: the metadata
+ * plus an optional reason string (null when the result is a success).
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @version $Id$
+ */
+public class RepositoryMetadataResult
+{
+    private final RepositoryMetadata metadata;
+
+    private final String reason;
+
+    /** Records a result with no attached reason. */
+    public RepositoryMetadataResult( RepositoryMetadata metadata )
+    {
+        this( metadata, null );
+    }
+
+    /** Records a result together with its explanatory reason. */
+    public RepositoryMetadataResult( RepositoryMetadata metadata, String reason )
+    {
+        this.metadata = metadata;
+        this.reason = reason;
+    }
+
+    public RepositoryMetadata getMetadata()
+    {
+        return metadata;
+    }
+
+    public String getReason()
+    {
+        return reason;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+
+import java.util.List;
+
+/**
+ * The transitive and metadata validation reports will need to query the repository for artifacts.
+ */
+public interface RepositoryQueryLayer
+{
+ /** Plexus role used to look up implementations of this query layer. */
+ String ROLE = RepositoryQueryLayer.class.getName();
+
+ /**
+ * @param artifact the artifact to look for
+ * @return whether the artifact is present in the repository
+ */
+ boolean containsArtifact( Artifact artifact );
+
+ /**
+ * Check for a specific timestamped snapshot build of the artifact.
+ *
+ * @todo I believe we can remove this [BP] - artifact should contain all the necessary version info
+ */
+ boolean containsArtifact( Artifact artifact, Snapshot snapshot );
+
+ /**
+ * @param artifact the artifact whose repository metadata is consulted
+ * @return the list of versions recorded in the repository metadata
+ * @throws RepositoryQueryLayerException if the metadata is missing or unreadable
+ */
+ List getVersions( Artifact artifact )
+ throws RepositoryQueryLayerException;
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Exception thrown by the repository query layer, e.g. when repository
+ * metadata cannot be located or read.
+ */
+public class RepositoryQueryLayerException
+ extends Exception
+{
+ public RepositoryQueryLayerException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+
+ public RepositoryQueryLayerException( String message )
+ {
+ super( message );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+
+/**
+ * Gets the preferred implementation of a repository query layer for the given repository.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @version $Id$
+ */
+public interface RepositoryQueryLayerFactory
+{
+ /** Plexus role used to look up implementations of this factory. */
+ String ROLE = RepositoryQueryLayerFactory.class.getName();
+
+ /**
+ * Create or obtain a query interface.
+ *
+ * @param repository the repository to query
+ * @return the obtained query layer
+ */
+ RepositoryQueryLayer createRepositoryQueryLayer( ArtifactRepository repository );
+}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.List;
-
-/**
- *
- */
-public abstract class AbstractRepositoryQueryLayer
- implements RepositoryQueryLayer
-{
- protected ArtifactRepository repository;
-
- public boolean containsArtifact( Artifact artifact )
- {
- File f = new File( repository.getBasedir(), repository.pathOf( artifact ) );
- return f.exists();
- }
-
- public boolean containsArtifact( Artifact artifact, Snapshot snapshot )
- {
- String artifactPath = getSnapshotArtifactRepositoryPath( artifact, snapshot );
- File artifactFile = new File( artifactPath );
- return artifactFile.exists();
- }
-
- public List getVersions( Artifact artifact )
- throws RepositoryQueryLayerException
- {
- Metadata metadata = getMetadata( artifact );
-
- return metadata.getVersioning().getVersions();
- }
-
- protected String getSnapshotArtifactRepositoryPath( Artifact artifact, Snapshot snapshot )
- {
- File f = new File( repository.getBasedir(), repository.pathOf( artifact ) );
- String snapshotInfo = artifact.getVersion().replaceFirst( "SNAPSHOT", snapshot.getTimestamp() + "-" +
- snapshot.getBuildNumber() + ".pom" );
- File snapshotFile = new File( f.getParentFile(), artifact.getArtifactId() + "-" + snapshotInfo );
- return snapshotFile.getAbsolutePath();
- }
-
- protected Metadata getMetadata( Artifact artifact )
- throws RepositoryQueryLayerException
- {
- Metadata metadata;
-
- ArtifactRepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact );
- String path = repository.pathOfRemoteRepositoryMetadata( repositoryMetadata );
- File metadataFile = new File( repository.getBasedir(), path );
- if ( metadataFile.exists() )
- {
- MetadataXpp3Reader reader = new MetadataXpp3Reader();
- try
- {
- metadata = reader.read( new FileReader( metadataFile ) );
- }
- catch ( FileNotFoundException e )
- {
- throw new RepositoryQueryLayerException( "Error occurred while attempting to read metadata file", e );
- }
- catch ( IOException e )
- {
- throw new RepositoryQueryLayerException( "Error occurred while attempting to read metadata file", e );
- }
- catch ( XmlPullParserException e )
- {
- throw new RepositoryQueryLayerException( "Error occurred while attempting to read metadata file", e );
- }
- }
- else
- {
- throw new RepositoryQueryLayerException( "Metadata not found: " + metadataFile.getAbsolutePath() );
- }
-
- return metadata;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-
-/**
- * This interface will be called by the main system for each artifact as it is discovered. This is how each of the
- * different types of reports are implemented.
- */
public interface ArtifactReportProcessor
{
    /** Plexus role used to look up implementations of this component. */
    String ROLE = ArtifactReportProcessor.class.getName();

    /**
     * Process a single discovered artifact and record successes, warnings and
     * failures on the supplied reporter.
     *
     * @param model the artifact's POM model (implementations should tolerate null)
     * @param artifact the artifact being processed
     * @param reporter sink that accumulates the processing results
     * @param repository the repository in which the artifact was discovered
     * @throws ReportProcessorException if processing fails in a way that cannot
     *             simply be recorded as a report failure
     */
    void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter, ArtifactRepository repository )
        throws ReportProcessorException;

}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.Iterator;
-
-/**
- * This interface is used by the single artifact processor.
- * <p/>
- * The initial implementation of this will just need to be a mock implementation in src/test/java, used to track the
- * failures and successes for checking assertions. Later, implementations will be made to present reports on the
- * web interface, send them via mail, and so on.
- *
- * @todo i18n
- */
public interface ArtifactReporter
{
    /** Plexus role used to look up implementations of this component. */
    String ROLE = ArtifactReporter.class.getName();

    // --- standard failure reasons for artifact-level validation ---

    String NULL_MODEL = "Provided model was null";

    String NULL_ARTIFACT = "Provided artifact was null";

    String EMPTY_GROUP_ID = "Group id was empty or null";

    String EMPTY_ARTIFACT_ID = "Artifact id was empty or null";

    String EMPTY_VERSION = "Version was empty or null";

    // --- standard failure reasons for dependency-level validation ---
    // NOTE(review): these three messages are textually identical to the artifact-level
    // ones above, so report consumers cannot distinguish the two cases from the
    // message alone — confirm whether distinct wording was intended.

    String EMPTY_DEPENDENCY_GROUP_ID = "Group id was empty or null";

    String EMPTY_DEPENDENCY_ARTIFACT_ID = "Artifact id was empty or null";

    String EMPTY_DEPENDENCY_VERSION = "Version was empty or null";

    String NO_DEPENDENCIES = "Artifact has no dependencies";

    String ARTIFACT_NOT_FOUND = "Artifact does not exist in the repository";

    String DEPENDENCY_NOT_FOUND = "Artifact's dependency does not exist in the repository";

    // --- recording results for artifacts ---

    void addFailure( Artifact artifact, String reason );

    void addSuccess( Artifact artifact );

    void addWarning( Artifact artifact, String message );

    // --- recording results for repository metadata ---

    void addFailure( RepositoryMetadata metadata, String reason );

    void addSuccess( RepositoryMetadata metadata );

    void addWarning( RepositoryMetadata metadata, String message );

    // --- iterating the accumulated results ---

    Iterator getArtifactFailureIterator();

    Iterator getArtifactSuccessIterator();

    Iterator getArtifactWarningIterator();

    Iterator getRepositoryMetadataFailureIterator();

    Iterator getRepositoryMetadataSuccessIterator();

    Iterator getRepositoryMetadataWarningIterator();

    // --- result counts (artifact and metadata results combined) ---

    int getFailures();

    int getSuccesses();

    int getWarnings();
}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-/**
- * A result of the report for a given artifact being processed.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- */
-public class ArtifactResult
-{
- private final Artifact artifact;
-
- private final String reason;
-
- public ArtifactResult( Artifact artifact )
- {
- this.artifact = artifact;
- this.reason = null;
- }
-
- public ArtifactResult( Artifact artifact, String reason )
- {
- this.artifact = artifact;
- this.reason = reason;
- }
-
- public Artifact getArtifact()
- {
- return artifact;
- }
-
- public String getReason()
- {
- return reason;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.Plugin;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * This class will report on bad metadata files. These include invalid version declarations and incomplete version
- * information inside the metadata file. Plugin metadata will be checked for validity of the latest plugin artifacts.
- *
- * @plexus.component role="org.apache.maven.repository.reporting.MetadataReportProcessor" role-hint="bad-metadata"
- */
-public class BadMetadataReportProcessor
- implements MetadataReportProcessor
-{
- /**
- * @plexus.requirement
- */
- private ArtifactFactory artifactFactory;
-
- /**
- * @plexus.requirement
- */
- private RepositoryQueryLayerFactory repositoryQueryLayerFactory;
-
- /**
- * Process the metadata encountered in the repository and report all errors found, if any.
- *
- * @param metadata the metadata to be processed.
- * @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
- * @throws ReportProcessorException if an error was occurred while processing the metadata
- */
- public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
- throws ReportProcessorException
- {
- boolean hasFailures = false;
-
- if ( metadata.storedInGroupDirectory() )
- {
- try
- {
- hasFailures = checkPluginMetadata( metadata, repository, reporter );
- }
- catch ( IOException e )
- {
- throw new ReportProcessorException( "Error getting plugin artifact directories versions", e );
- }
- }
- else
- {
- String lastUpdated = metadata.getMetadata().getVersioning().getLastUpdated();
- if ( lastUpdated == null || lastUpdated.length() == 0 )
- {
- reporter.addFailure( metadata, "Missing lastUpdated element inside the metadata." );
- hasFailures = true;
- }
-
- if ( metadata.storedInArtifactVersionDirectory() )
- {
- hasFailures |= checkSnapshotMetadata( metadata, repository, reporter );
- }
- else
- {
- if ( !checkMetadataVersions( metadata, repository, reporter ) )
- {
- hasFailures = true;
- }
-
- try
- {
- if ( checkRepositoryVersions( metadata, repository, reporter ) )
- {
- hasFailures = true;
- }
- }
- catch ( IOException e )
- {
- throw new ReportProcessorException( "Error getting versions", e );
- }
- }
- }
-
- if ( !hasFailures )
- {
- reporter.addSuccess( metadata );
- }
- }
-
- /**
- * Method for processing a GroupRepositoryMetadata
- *
- * @param metadata the metadata to be processed.
- * @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
- */
- private boolean checkPluginMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ArtifactReporter reporter )
- throws IOException
- {
- boolean hasFailures = false;
-
- File metadataDir =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
- List pluginDirs = getArtifactIdFiles( metadataDir );
-
- Map prefixes = new HashMap();
- for ( Iterator plugins = metadata.getMetadata().getPlugins().iterator(); plugins.hasNext(); )
- {
- Plugin plugin = (Plugin) plugins.next();
-
- String artifactId = plugin.getArtifactId();
- if ( artifactId == null || artifactId.length() == 0 )
- {
- reporter.addFailure( metadata, "Missing or empty artifactId in group metadata." );
- hasFailures = true;
- }
-
- String prefix = plugin.getPrefix();
- if ( prefix == null || prefix.length() == 0 )
- {
- reporter.addFailure( metadata, "Missing or empty plugin prefix for artifactId " + artifactId + "." );
- hasFailures = true;
- }
- else
- {
- if ( prefixes.containsKey( prefix ) )
- {
- reporter.addFailure( metadata, "Duplicate plugin prefix found: " + prefix + "." );
- hasFailures = true;
- }
- else
- {
- prefixes.put( prefix, plugin );
- }
- }
-
- if ( artifactId != null && artifactId.length() > 0 )
- {
- File pluginDir = new File( metadataDir, artifactId );
- if ( !pluginDirs.contains( pluginDir ) )
- {
- reporter.addFailure( metadata, "Metadata plugin " + artifactId + " not found in the repository" );
- hasFailures = true;
- }
- else
- {
- pluginDirs.remove( pluginDir );
- }
- }
- }
-
- if ( pluginDirs.size() > 0 )
- {
- for ( Iterator plugins = pluginDirs.iterator(); plugins.hasNext(); )
- {
- File plugin = (File) plugins.next();
- reporter.addFailure( metadata, "Plugin " + plugin.getName() + " is present in the repository but " +
- "missing in the metadata." );
- }
- hasFailures = true;
- }
-
- return hasFailures;
- }
-
- /**
- * Method for processing a SnapshotArtifactRepository
- *
- * @param metadata the metadata to be processed.
- * @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
- */
- private boolean checkSnapshotMetadata( RepositoryMetadata metadata, ArtifactRepository repository,
- ArtifactReporter reporter )
- {
- RepositoryQueryLayer repositoryQueryLayer =
- repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
-
- boolean hasFailures = false;
-
- Snapshot snapshot = metadata.getMetadata().getVersioning().getSnapshot();
- String timestamp = snapshot.getTimestamp();
- String buildNumber = String.valueOf( snapshot.getBuildNumber() );
-
- Artifact artifact = createArtifact( metadata );
- if ( !repositoryQueryLayer.containsArtifact( artifact, snapshot ) )
- {
- reporter.addFailure( metadata, "Snapshot artifact " + timestamp + "-" + buildNumber + " does not exist." );
- hasFailures = true;
- }
-
- return hasFailures;
- }
-
- /**
- * Method for validating the versions declared inside an ArtifactRepositoryMetadata
- *
- * @param metadata the metadata to be processed.
- * @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
- */
- private boolean checkMetadataVersions( RepositoryMetadata metadata, ArtifactRepository repository,
- ArtifactReporter reporter )
- {
- RepositoryQueryLayer repositoryQueryLayer =
- repositoryQueryLayerFactory.createRepositoryQueryLayer( repository );
-
- boolean hasFailures = false;
- Versioning versioning = metadata.getMetadata().getVersioning();
- for ( Iterator versions = versioning.getVersions().iterator(); versions.hasNext(); )
- {
- String version = (String) versions.next();
-
- Artifact artifact = createArtifact( metadata, version );
-
- if ( !repositoryQueryLayer.containsArtifact( artifact ) )
- {
- reporter.addFailure( metadata, "Artifact version " + version + " is present in metadata but " +
- "missing in the repository." );
- hasFailures = true;
- }
- }
- return hasFailures;
- }
-
- /**
- * Searches the artifact repository directory for all versions and verifies that all of them are listed in the
- * ArtifactRepositoryMetadata
- *
- * @param metadata the metadata to be processed.
- * @param repository the repository where the metadata was encountered
- * @param reporter the ArtifactReporter to receive processing results
- */
- private boolean checkRepositoryVersions( RepositoryMetadata metadata, ArtifactRepository repository,
- ArtifactReporter reporter )
- throws IOException
- {
- boolean hasFailures = false;
- Versioning versioning = metadata.getMetadata().getVersioning();
- File versionsDir =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
- List versions = FileUtils.getFileNames( versionsDir, "*/*.pom", null, false );
- for ( Iterator i = versions.iterator(); i.hasNext(); )
- {
- File path = new File( (String) i.next() );
- String version = path.getParentFile().getName();
- if ( !versioning.getVersions().contains( version ) )
- {
- reporter.addFailure( metadata, "Artifact version " + version + " found in the repository but " +
- "missing in the metadata." );
- hasFailures = true;
- }
- }
- return hasFailures;
- }
-
- /**
- * Used to create an artifact object from a metadata base version
- */
- private Artifact createArtifact( RepositoryMetadata metadata )
- {
- return artifactFactory.createBuildArtifact( metadata.getGroupId(), metadata.getArtifactId(),
- metadata.getBaseVersion(), "pom" );
- }
-
- /**
- * Used to create an artifact object with a specified version
- */
- private Artifact createArtifact( RepositoryMetadata metadata, String version )
- {
- return artifactFactory.createBuildArtifact( metadata.getGroupId(), metadata.getArtifactId(), version, "pom" );
- }
-
- /**
- * Used to gather artifactIds from a groupId directory
- */
- private List getArtifactIdFiles( File groupIdDir )
- throws IOException
- {
- List artifactIdFiles = new ArrayList();
-
- List fileArray = new ArrayList( Arrays.asList( groupIdDir.listFiles() ) );
- for ( Iterator files = fileArray.iterator(); files.hasNext(); )
- {
- File artifactDir = (File) files.next();
-
- if ( artifactDir.isDirectory() )
- {
- List versions = FileUtils.getFileNames( artifactDir, "*/*.pom", null, false );
- if ( versions.size() > 0 )
- {
- artifactIdFiles.add( artifactDir );
- }
- }
- }
-
- return artifactIdFiles;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-/**
- * Class to implement caching.
- */
/**
 * A simple synchronized LRU-style cache backed by a {@link LinkedHashMap}.
 * <p>
 * Eviction is driven by two independent policies: a hard maximum size
 * (evict oldest until within bounds) and a target hit ratio (once the observed
 * hit ratio reaches the target, one oldest entry is trimmed on each put to
 * conserve memory). All access is synchronized on the backing map.
 */
public class Cache
{
    /** Backing store; insertion order doubles as the eviction order (oldest first). */
    private final Map cache;

    /** Hit ratio at or above which the cache starts trimming old entries. */
    private final double cacheHitRatio;

    /** Hard upper bound on entry count; 0 means unbounded (ratio-only eviction). */
    private final int cacheMaxSize;

    private long cacheHits;

    private long cacheMiss;

    /**
     * Caches all data and expires only the oldest data when the specified cache hit rate is reached.
     */
    public Cache( double cacheHitRatio )
    {
        this( cacheHitRatio, 0 );
    }

    /**
     * Caches all data and expires only the oldest data when the maximum cache size is reached
     */
    public Cache( int cacheMaxSize )
    {
        this( (double) 1, cacheMaxSize );
    }

    /**
     * Caches all data and expires only the oldest data when either the specified cache hit rate is reached
     * or the maximum cache size is reached.
     */
    public Cache( double cacheHitRatio, int cacheMaxSize )
    {
        this.cacheHitRatio = cacheHitRatio;
        this.cacheMaxSize = cacheMaxSize;

        if ( cacheMaxSize > 0 )
        {
            cache = new LinkedHashMap( cacheMaxSize );
        }
        else
        {
            cache = new LinkedHashMap();
        }
    }

    /**
     * Check if the specified key is already mapped to an object.
     * Counts as a hit or miss for the hit-ratio statistics.
     *
     * @param key the key used to map the cached object
     * @return true if the cache contains an object associated with the given key
     */
    public boolean containsKey( Object key )
    {
        boolean contains;
        synchronized ( cache )
        {
            contains = cache.containsKey( key );

            if ( contains )
            {
                cacheHits++;
            }
            else
            {
                cacheMiss++;
            }
        }

        return contains;
    }

    /**
     * Check for a cached object and return it if it exists. Returns null when the keyed object is not found.
     * A successful lookup promotes the entry to most-recently-used.
     *
     * @param key the key used to map the cached object
     * @return the object mapped to the given key, or null if no cache object is mapped to the given key
     */
    public Object get( Object key )
    {
        Object retValue = null;

        synchronized ( cache )
        {
            if ( cache.containsKey( key ) )
            {
                // remove and put: this promotes it to the top since we use a linked hash map
                retValue = cache.remove( key );

                cache.put( key, retValue );

                cacheHits++;
            }
            else
            {
                cacheMiss++;
            }
        }

        return retValue;
    }

    /**
     * Cache the given value and map it using the given key, then apply the
     * eviction policies.
     *
     * @param key   the object to map the valued object
     * @param value the object to cache
     */
    public void put( Object key, Object value )
    {
        // remove and put: this promotes it to the top since we use a linked hash map
        synchronized ( cache )
        {
            if ( cache.containsKey( key ) )
            {
                cache.remove( key );
            }

            cache.put( key, value );
        }

        manageCache();
    }

    /**
     * Compute the efficiency of this cache.
     *
     * @return the ratio of cache hits to total lookups, or 0 before any lookup
     */
    public double getHitRate()
    {
        synchronized ( cache )
        {
            return cacheHits == 0 && cacheMiss == 0 ? 0 : (double) cacheHits / (double) ( cacheHits + cacheMiss );
        }
    }

    /**
     * Get the total number of cache objects currently cached.
     */
    public int size()
    {
        // synchronized for a consistent read — the original read the map unguarded
        // while every other accessor held the lock
        synchronized ( cache )
        {
            return cache.size();
        }
    }

    /**
     * Empty the cache and reset the cache hit rate
     */
    public void clear()
    {
        synchronized ( cache )
        {
            cacheHits = 0;
            cacheMiss = 0;
            cache.clear();
        }
    }

    /**
     * Apply the eviction policies after a put. The two hit-ratio branches of the
     * original were identical and are consolidated here; an emptiness guard
     * prevents NoSuchElementException if this is ever called on an empty cache.
     */
    private void manageCache()
    {
        synchronized ( cache )
        {
            if ( cacheMaxSize > 0 && cache.size() > cacheMaxSize )
            {
                // hard cap exceeded: evict oldest entries until within bounds
                Iterator iterator = cache.entrySet().iterator();
                while ( cache.size() > cacheMaxSize )
                {
                    iterator.next();
                    iterator.remove();
                }
            }
            else if ( cacheHitRatio <= getHitRate() && !cache.isEmpty() )
            {
                // desired hit ratio reached: trim the oldest entry to conserve memory
                Iterator iterator = cache.entrySet().iterator();
                iterator.next();
                iterator.remove();
            }
        }
    }

}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-
-
-/**
- *
- */
-public class CachedRepositoryQueryLayer
- extends AbstractRepositoryQueryLayer
-{
- private Cache cache;
-
- public static final double CACHE_HIT_RATIO = 0.5;
-
- public CachedRepositoryQueryLayer( ArtifactRepository repository )
- {
- this.repository = repository;
-
- cache = new Cache( CACHE_HIT_RATIO );
- }
-
- public double getCacheHitRate()
- {
- return cache.getHitRate();
- }
-
- public boolean containsArtifact( Artifact artifact )
- {
- boolean artifactFound = true;
-
- String artifactPath = repository.getBasedir() + "/" + repository.pathOf( artifact );
-
- if ( cache.get( artifactPath ) == null )
- {
- artifactFound = super.containsArtifact( artifact );
- if ( artifactFound )
- {
- cache.put( artifactPath, artifactPath );
- }
- }
-
- return artifactFound;
- }
-
- public boolean containsArtifact( Artifact artifact, Snapshot snapshot )
- {
- boolean artifactFound = true;
-
- String path = getSnapshotArtifactRepositoryPath( artifact, snapshot );
-
- if ( cache.get( path ) == null )
- {
- artifactFound = super.containsArtifact( artifact, snapshot );
- if ( artifactFound )
- {
- cache.put( path, path );
- }
- }
-
- return artifactFound;
- }
-
- /**
- * Override method to utilize the cache
- */
- protected Metadata getMetadata( Artifact artifact )
- throws RepositoryQueryLayerException
- {
- Metadata metadata = (Metadata) cache.get( artifact.getId() );
-
- if ( metadata == null )
- {
- metadata = super.getMetadata( artifact );
- cache.put( artifact.getId(), metadata );
- }
-
- return metadata;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-import org.apache.maven.repository.digest.Digester;
-import org.apache.maven.repository.digest.DigesterException;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * This class reports invalid and mismatched checksums of artifacts and metadata files.
- * It validates MD5 and SHA-1 checksums.
- *
- * @plexus.component role="org.apache.maven.repository.reporting.ArtifactReportProcessor" role-hint="checksum"
- */
-public class ChecksumArtifactReporter
- implements ArtifactReportProcessor
-{
- /**
- * @plexus.requirement role-hint="sha1"
- */
- private Digester sha1Digester;
-
- /**
- * @plexus.requirement role-hint="md5"
- */
- private Digester md5Digester;
-
- /**
- * Validate the checksum of the specified artifact.
- *
- * @param model
- * @param artifact
- * @param reporter
- * @param repository
- */
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
- }
-
- //check if checksum files exist
- String path = repository.pathOf( artifact );
- File file = new File( repository.getBasedir(), path );
-
- verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, artifact );
- verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, artifact );
- }
-
- private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
- ArtifactReporter reporter, Artifact artifact )
- {
- File checksumFile = new File( repository.getBasedir(), path );
- if ( checksumFile.exists() )
- {
- try
- {
- digester.verify( file, FileUtils.fileRead( checksumFile ) );
-
- reporter.addSuccess( artifact );
- }
- catch ( DigesterException e )
- {
- reporter.addFailure( artifact, e.getMessage() );
- }
- catch ( IOException e )
- {
- reporter.addFailure( artifact, "Read file error: " + e.getMessage() );
- }
- }
- else
- {
- reporter.addFailure( artifact, digester.getAlgorithm() + " checksum file does not exist." );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.repository.digest.Digester;
-import org.apache.maven.repository.digest.DigesterException;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * This class reports invalid and mismatched checksums of artifacts and metadata files.
- * It validates MD5 and SHA-1 checksums.
- *
- * @plexus.component role="org.apache.maven.repository.reporting.MetadataReportProcessor" role-hint="checksum-metadata"
- */
public class ChecksumMetadataReporter
    implements MetadataReportProcessor
{
    /**
     * @plexus.requirement role-hint="sha1"
     */
    private Digester sha1Digester;

    /**
     * @plexus.requirement role-hint="md5"
     */
    private Digester md5Digester;

    /**
     * Validate the checksums of the metadata. Get the metadata file from the
     * repository then validate the checksum.
     * <p>
     * NOTE(review): this mirrors ChecksumArtifactReporter almost line for line,
     * differing only in the reported subject (RepositoryMetadata vs Artifact) —
     * a shared helper may be worth extracting.
     *
     * @param metadata   the metadata whose checksum files are validated
     * @param repository the repository containing the metadata; must be file-based
     * @param reporter   sink for success/failure results
     */
    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
    {
        if ( !"file".equals( repository.getProtocol() ) )
        {
            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
            throw new UnsupportedOperationException(
                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
        }

        //check if checksum files exist
        String path = repository.pathOfRemoteRepositoryMetadata( metadata );
        File file = new File( repository.getBasedir(), path );

        // each algorithm's checksum lives next to the metadata with a suffixed name
        verifyChecksum( repository, path + ".md5", file, md5Digester, reporter, metadata );
        verifyChecksum( repository, path + ".sha1", file, sha1Digester, reporter, metadata );

    }

    /**
     * Compare the metadata file against one stored checksum file and report the
     * outcome: success on match, failure on mismatch, read error, or a missing
     * checksum file.
     */
    private void verifyChecksum( ArtifactRepository repository, String path, File file, Digester digester,
                                 ArtifactReporter reporter, RepositoryMetadata metadata )
    {
        File checksumFile = new File( repository.getBasedir(), path );
        if ( checksumFile.exists() )
        {
            try
            {
                digester.verify( file, FileUtils.fileRead( checksumFile ) );

                reporter.addSuccess( metadata );
            }
            catch ( DigesterException e )
            {
                reporter.addFailure( metadata, e.getMessage() );
            }
            catch ( IOException e )
            {
                reporter.addFailure( metadata, "Read file error: " + e.getMessage() );
            }
        }
        else
        {
            reporter.addFailure( metadata, digester.getAlgorithm() + " checksum file does not exist." );
        }
    }

}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Dependency;
-import org.apache.maven.model.Model;
-
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * @plexus.component role="org.apache.maven.repository.reporting.ArtifactReportProcessor" role-hint="default"
- */
-public class DefaultArtifactReportProcessor
- implements ArtifactReportProcessor
-{
- private static final String EMPTY_STRING = "";
-
- // plexus components
- private ArtifactFactory artifactFactory;
-
- private RepositoryQueryLayer repositoryQueryLayer;
-
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
- {
- if ( artifact == null )
- {
- reporter.addFailure( artifact, ArtifactReporter.NULL_ARTIFACT );
- }
- else
- {
- processArtifact( artifact, reporter );
- }
-
- if ( model == null )
- {
- reporter.addFailure( artifact, ArtifactReporter.NULL_MODEL );
- }
- else
- {
- List dependencies = model.getDependencies();
- processDependencies( dependencies, reporter );
- }
- }
-
- private void processArtifact( Artifact artifact, ArtifactReporter reporter )
- {
- boolean hasFailed = false;
- if ( EMPTY_STRING.equals( artifact.getGroupId() ) || artifact.getGroupId() == null )
- {
- reporter.addFailure( artifact, ArtifactReporter.EMPTY_GROUP_ID );
- hasFailed = true;
- }
- if ( EMPTY_STRING.equals( artifact.getArtifactId() ) || artifact.getArtifactId() == null )
- {
- reporter.addFailure( artifact, ArtifactReporter.EMPTY_ARTIFACT_ID );
- hasFailed = true;
- }
- if ( EMPTY_STRING.equals( artifact.getVersion() ) || artifact.getVersion() == null )
- {
- reporter.addFailure( artifact, ArtifactReporter.EMPTY_VERSION );
- hasFailed = true;
- }
- if ( !hasFailed )
- {
- if ( repositoryQueryLayer.containsArtifact( artifact ) )
- {
- reporter.addSuccess( artifact );
- }
- else
- {
- reporter.addFailure( artifact, ArtifactReporter.ARTIFACT_NOT_FOUND );
- }
- }
- }
-
- private void processDependencies( List dependencies, ArtifactReporter reporter )
- {
- if ( dependencies.size() > 0 )
- {
- Iterator iterator = dependencies.iterator();
- while ( iterator.hasNext() )
- {
- boolean hasFailed = false;
- Dependency dependency = (Dependency) iterator.next();
- Artifact artifact = createArtifact( dependency );
- if ( EMPTY_STRING.equals( dependency.getGroupId() ) || dependency.getGroupId() == null )
- {
- reporter.addFailure( artifact, ArtifactReporter.EMPTY_DEPENDENCY_GROUP_ID );
- hasFailed = true;
- }
- if ( EMPTY_STRING.equals( dependency.getArtifactId() ) || dependency.getArtifactId() == null )
- {
- reporter.addFailure( artifact, ArtifactReporter.EMPTY_DEPENDENCY_ARTIFACT_ID );
- hasFailed = true;
- }
- if ( EMPTY_STRING.equals( dependency.getVersion() ) || dependency.getVersion() == null )
- {
- reporter.addFailure( artifact, ArtifactReporter.EMPTY_DEPENDENCY_VERSION );
- hasFailed = true;
- }
- if ( !hasFailed )
- {
- if ( repositoryQueryLayer.containsArtifact( artifact ) )
- {
- reporter.addSuccess( artifact );
- }
- else
- {
- reporter.addFailure( artifact, ArtifactReporter.DEPENDENCY_NOT_FOUND );
- }
- }
- }
- }
-
- }
-
- /**
- * Only used for passing a mock object when unit testing
- *
- * @param repositoryQueryLayer
- */
- protected void setRepositoryQueryLayer( RepositoryQueryLayer repositoryQueryLayer )
- {
- this.repositoryQueryLayer = repositoryQueryLayer;
- }
-
- /**
- * Only used for passing a mock object when unit testing
- *
- * @param artifactFactory
- */
- protected void setArtifactFactory( ArtifactFactory artifactFactory )
- {
- this.artifactFactory = artifactFactory;
- }
-
- private Artifact createArtifact( Dependency dependency )
- {
- return artifactFactory.createBuildArtifact( dependency.getGroupId(), dependency.getArtifactId(),
- dependency.getVersion(), "pom" );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * @plexus.component role="org.apache.maven.repository.reporting.ArtifactReporter" role-hint="default"
- */
-public class DefaultArtifactReporter
- implements ArtifactReporter
-{
- private List artifactFailures = new ArrayList();
-
- private List artifactSuccesses = new ArrayList();
-
- private List artifactWarnings = new ArrayList();
-
- private List metadataFailures = new ArrayList();
-
- private List metadataSuccesses = new ArrayList();
-
- private List metadataWarnings = new ArrayList();
-
- public void addFailure( Artifact artifact, String reason )
- {
- artifactFailures.add( new ArtifactResult( artifact, reason ) );
- }
-
- public void addSuccess( Artifact artifact )
- {
- artifactSuccesses.add( new ArtifactResult( artifact ) );
- }
-
- public void addWarning( Artifact artifact, String message )
- {
- artifactWarnings.add( new ArtifactResult( artifact, message ) );
- }
-
- public void addFailure( RepositoryMetadata metadata, String reason )
- {
- metadataFailures.add( new RepositoryMetadataResult( metadata, reason ) );
- }
-
- public void addSuccess( RepositoryMetadata metadata )
- {
- metadataSuccesses.add( new RepositoryMetadataResult( metadata ) );
- }
-
- public void addWarning( RepositoryMetadata metadata, String message )
- {
- metadataWarnings.add( new RepositoryMetadataResult( metadata, message ) );
- }
-
- public Iterator getArtifactFailureIterator()
- {
- return artifactFailures.iterator();
- }
-
- public Iterator getArtifactSuccessIterator()
- {
- return artifactSuccesses.iterator();
- }
-
- public Iterator getArtifactWarningIterator()
- {
- return artifactWarnings.iterator();
- }
-
- public Iterator getRepositoryMetadataFailureIterator()
- {
- return metadataFailures.iterator();
- }
-
- public Iterator getRepositoryMetadataSuccessIterator()
- {
- return metadataSuccesses.iterator();
- }
-
- public Iterator getRepositoryMetadataWarningIterator()
- {
- return metadataWarnings.iterator();
- }
-
- public int getFailures()
- {
- return artifactFailures.size() + metadataFailures.size();
- }
-
- public int getSuccesses()
- {
- return artifactSuccesses.size() + metadataSuccesses.size();
- }
-
- public int getWarnings()
- {
- return artifactWarnings.size() + metadataWarnings.size();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- *
- */
-public class DefaultRepositoryQueryLayer
- extends AbstractRepositoryQueryLayer
-{
- public DefaultRepositoryQueryLayer( ArtifactRepository repository )
- {
- this.repository = repository;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- * Gets the default implementation of a repository query layer for the given repository.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- * @plexus.component role="org.apache.maven.repository.reporting.RepositoryQueryLayerFactory"
- */
-public class DefaultRepositoryQueryLayerFactory
- implements RepositoryQueryLayerFactory
-{
- public RepositoryQueryLayer createRepositoryQueryLayer( ArtifactRepository repository )
- {
- return new DefaultRepositoryQueryLayer( repository );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.File;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-import org.apache.maven.repository.digest.Digester;
-import org.apache.maven.repository.digest.DigesterException;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.RepositoryIndexSearchException;
-import org.apache.maven.repository.indexing.lucene.LuceneQuery;
-import org.apache.maven.repository.indexing.record.StandardArtifactIndexRecord;
-import org.apache.maven.repository.indexing.record.StandardIndexRecordFields;
-
-/**
- * Validates an artifact file for duplicates within the same groupId based from what's available in a repository index.
- *
- * @author Edwin Punzalan
- * @plexus.component role="org.apache.maven.repository.reporting.ArtifactReportProcessor" role-hint="duplicate"
- */
-public class DuplicateArtifactFileReportProcessor
- implements ArtifactReportProcessor
-{
- /**
- * @plexus.requirement role-hint="md5"
- */
- private Digester digester;
-
- /**
- * @plexus.requirement
- */
- private RepositoryArtifactIndexFactory indexFactory;
-
- /**
- * @plexus.configuration
- */
- private String indexDirectory;
-
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
- throws ReportProcessorException
- {
- if ( artifact.getFile() != null )
- {
- RepositoryArtifactIndex index = indexFactory.createStandardIndex( new File( indexDirectory ) );
-
- String checksum;
- try
- {
- checksum = digester.calc( artifact.getFile() );
- }
- catch ( DigesterException e )
- {
- throw new ReportProcessorException( "Failed to generate checksum", e );
- }
-
- try
- {
- List results = index.search( new LuceneQuery(
- new TermQuery( new Term( StandardIndexRecordFields.MD5, checksum.toLowerCase() ) ) ) );
-
- if ( results.isEmpty() )
- {
- reporter.addSuccess( artifact );
- }
- else
- {
- boolean hasDuplicates = false;
- for ( Iterator i = results.iterator(); i.hasNext(); )
- {
- StandardArtifactIndexRecord result = (StandardArtifactIndexRecord) i.next();
-
- //make sure it is not the same artifact
- if ( !result.getFilename().equals( repository.pathOf( artifact ) ) )
- {
- //report only duplicates from the same groupId
- String groupId = artifact.getGroupId();
- if ( groupId.equals( result.getGroupId() ) )
- {
- hasDuplicates = true;
- reporter.addFailure( artifact, "Found duplicate for " + artifact.getId() );
- }
- }
- }
-
- if ( !hasDuplicates )
- {
- reporter.addSuccess( artifact );
- }
- }
- }
- catch ( RepositoryIndexSearchException e )
- {
- throw new ReportProcessorException( "Failed to search in index", e );
- }
- }
- else
- {
- reporter.addWarning( artifact, "Artifact file is null" );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.codehaus.plexus.util.IOUtil;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-
-/**
- * This class validates well-formedness of pom xml file.
- *
- * @plexus.component role="org.apache.maven.repository.reporting.ArtifactReportProcessor" role-hint="invalid-pom"
- */
-public class InvalidPomArtifactReportProcessor
- implements ArtifactReportProcessor
-{
- /**
- * @param model
- * @param artifact The pom xml file to be validated, passed as an artifact object.
- * @param reporter The artifact reporter object.
- * @param repository the repository where the artifact is located.
- */
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
- }
-
- if ( "pom".equals( artifact.getType().toLowerCase() ) )
- {
- File f = new File( repository.getBasedir(), repository.pathOf( artifact ) );
-
- if ( !f.exists() )
- {
- reporter.addFailure( artifact, "Artifact not found." );
- }
- else
- {
- Reader reader = null;
-
- MavenXpp3Reader pomReader = new MavenXpp3Reader();
-
- try
- {
- reader = new FileReader( f );
- pomReader.read( reader );
- reporter.addSuccess( artifact );
- }
- catch ( XmlPullParserException e )
- {
- reporter.addFailure( artifact, "The pom xml file is not well-formed. Error while parsing: " +
- e.getMessage() );
- }
- catch ( FileNotFoundException e )
- {
- reporter.addFailure( artifact, "Error while reading the pom xml file: " + e.getMessage() );
- }
- catch ( IOException e )
- {
- reporter.addFailure( artifact, "Error while reading the pom xml file: " + e.getMessage() );
- }
- finally
- {
- IOUtil.close( reader );
- }
- }
- }
- else
- {
- reporter.addWarning( artifact, "The artifact is not a pom xml file." );
- }
- }
-
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.codehaus.plexus.util.IOUtil;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
-
-/**
- * Validate the location of the artifact based on the values indicated
- * in its pom (both the pom packaged with the artifact & the pom in the
- * file system).
- * @plexus.component role="org.apache.maven.repository.reporting.ArtifactReportProcessor" role-hint="artifact-location"
- */
-public class LocationArtifactReportProcessor
- implements ArtifactReportProcessor
-{
- /** @plexus.requirement */
- private ArtifactFactory artifactFactory;
-
- /**
- * Check whether the artifact is in its proper location. The location of the artifact
- * is validated first against the groupId, artifactId and versionId in the specified model
- * object (pom in the file system). Then unpack the artifact (jar file) and get the model (pom)
- * included in the package. If a model exists inside the package, then check if the artifact's
- * location is valid based on the location specified in the pom. Check if the both the location
- * specified in the file system pom and in the pom included in the package is the same.
- *
- * @param model Represents the pom in the file system.
- * @param artifact
- * @param reporter
- * @param repository
- */
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
- throws ReportProcessorException
- {
- if ( !"file".equals( repository.getProtocol() ) )
- {
- // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
- throw new UnsupportedOperationException(
- "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
- }
-
- //check if the artifact is located in its proper location based on the info
- //specified in the model object/pom
- Artifact modelArtifact = artifactFactory.createBuildArtifact( model.getGroupId(), model.getArtifactId(),
- model.getVersion(), model.getPackaging() );
-
- boolean failed = false;
- String modelPath = repository.pathOf( modelArtifact );
- String artifactPath = repository.pathOf( artifact );
- if ( modelPath.equals( artifactPath ) )
- {
- //get the location of the artifact itself
- File file = new File( repository.getBasedir(), artifactPath );
-
- if ( file.exists() )
- {
- //unpack the artifact (using the groupId, artifactId & version specified in the artifact object itself
- //check if the pom is included in the package
- Model extractedModel = readArtifactModel( file, artifact.getGroupId(), artifact.getArtifactId() );
-
- if ( extractedModel != null )
- {
- Artifact extractedArtifact = artifactFactory.createBuildArtifact( extractedModel.getGroupId(),
- extractedModel.getArtifactId(),
- extractedModel.getVersion(),
- extractedModel.getPackaging() );
- if ( !repository.pathOf( extractedArtifact ).equals( artifactPath ) )
- {
- reporter.addFailure( artifact,
- "The artifact is out of place. It does not match the specified location in the packaged pom." );
- failed = true;
- }
- }
- }
- else
- {
- reporter.addFailure( artifact,
- "The artifact is out of place. It does not exist at the specified location in the repository pom." );
- failed = true;
- }
- }
- else
- {
- reporter.addFailure( artifact,
- "The artifact is out of place. It does not match the specified location in the repository pom." );
- failed = true;
- }
-
- if ( !failed )
- {
- reporter.addSuccess( artifact );
- }
- }
-
- /**
- * Extract the contents of the artifact/jar file.
- *
- * @param file
- * @param groupId
- * @param artifactId
- */
- private Model readArtifactModel( File file, String groupId, String artifactId )
- throws ReportProcessorException
- {
- Model model = null;
-
- JarFile jar = null;
- try
- {
- jar = new JarFile( file );
-
- //Get the entry and its input stream.
- JarEntry entry = jar.getJarEntry( "META-INF/maven/" + groupId + "/" + artifactId + "/pom.xml" );
-
- // If the entry is not null, extract it.
- if ( entry != null )
- {
- model = readModel( jar.getInputStream( entry ) );
- }
- }
- catch ( IOException e )
- {
- // TODO: should just warn and continue?
- throw new ReportProcessorException( "Unable to read artifact to extract model", e );
- }
- catch ( XmlPullParserException e )
- {
- // TODO: should just warn and continue?
- throw new ReportProcessorException( "Unable to read artifact to extract model", e );
- }
- finally
- {
- if ( jar != null )
- {
- //noinspection UnusedCatchParameter
- try
- {
- jar.close();
- }
- catch ( IOException e )
- {
- // ignore
- }
- }
- }
- return model;
- }
-
- private Model readModel( InputStream entryStream )
- throws IOException, XmlPullParserException
- {
- Reader isReader = new InputStreamReader( entryStream );
-
- Model model;
- try
- {
- MavenXpp3Reader pomReader = new MavenXpp3Reader();
- model = pomReader.read( isReader );
- }
- finally
- {
- IOUtil.close( isReader );
- }
- return model;
- }
-
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * This interface is called by the main system for each piece of metadata as it is discovered.
- */
-public interface MetadataReportProcessor
-{
- String ROLE = MetadataReportProcessor.class.getName();
-
- void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
- throws ReportProcessorException;
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Exception occurring during reporting.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- */
-public class ReportProcessorException
- extends Exception
-{
- public ReportProcessorException( String msg, Throwable cause )
- {
- super( msg, cause );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-/**
- * A result of the report for a given artifact being processed.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- */
-public class RepositoryMetadataResult
-{
- private final RepositoryMetadata metadata;
-
- private final String reason;
-
- public RepositoryMetadataResult( RepositoryMetadata metadata )
- {
- this.metadata = metadata;
- this.reason = null;
- }
-
- public RepositoryMetadataResult( RepositoryMetadata metadata, String reason )
- {
- this.metadata = metadata;
- this.reason = reason;
- }
-
- public RepositoryMetadata getMetadata()
- {
- return metadata;
- }
-
- public String getReason()
- {
- return reason;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-
-import java.util.List;
-
-/**
- * The transitive and metadata validation reports will need to query the repository for artifacts.
- */
-public interface RepositoryQueryLayer
-{
- String ROLE = RepositoryQueryLayer.class.getName();
-
- boolean containsArtifact( Artifact artifact );
-
- /** @todo I believe we can remove this [BP] - artifact should contain all the necessary version info */
- boolean containsArtifact( Artifact artifact, Snapshot snapshot );
-
- List getVersions( Artifact artifact )
- throws RepositoryQueryLayerException;
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- *
- */
-public class RepositoryQueryLayerException
- extends Exception
-{
- public RepositoryQueryLayerException( String message, Throwable cause )
- {
- super( message, cause );
- }
-
- public RepositoryQueryLayerException( String message )
- {
- super( message );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- * Gets the preferred implementation of a repository query layer for the given repository.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- */
-public interface RepositoryQueryLayerFactory
-{
- String ROLE = RepositoryQueryLayerFactory.class.getName();
-
- /**
- * Create or obtain a query interface.
- *
- * @param repository the repository to query
- * @return the obtained query layer
- */
- RepositoryQueryLayer createRepositoryQueryLayer( ArtifactRepository repository );
-}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.digest.Digester;
+import org.apache.maven.archiva.digest.DigesterException;
+import org.codehaus.plexus.util.FileUtils;
+import org.codehaus.plexus.util.IOUtil;
+
+import java.io.BufferedOutputStream;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.util.jar.JarEntry;
+import java.util.jar.JarOutputStream;
+
+/**
+ * This class creates the artifact and metadata files used for testing the ChecksumArtifactReporter.
+ * It is extended by ChecksumArtifactReporterTest class.
+ */
+public abstract class AbstractChecksumArtifactReporterTestCase
+ extends AbstractRepositoryReportsTestCase
+{
+ private static final String[] validArtifactChecksumJars = {"validArtifact-1.0"};
+
+ private static final String[] invalidArtifactChecksumJars = {"invalidArtifact-1.0"};
+
+ private static final String metadataChecksumFilename = "maven-metadata";
+
+ private Digester sha1Digest;
+
+ private Digester md5Digest;
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ sha1Digest = (Digester) lookup( Digester.ROLE, "sha1" );
+ md5Digest = (Digester) lookup( Digester.ROLE, "md5" );
+ }
+
+ /**
+ * Create checksum files.
+ *
+ * @param type The type of checksum file to be created.
+ */
+ protected void createChecksumFile( String type )
+ throws DigesterException, IOException
+ {
+ //loop through the valid artifact names..
+ if ( "VALID".equals( type ) )
+ {
+ for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
+ {
+ writeChecksumFile( "checksumTest/", validArtifactChecksumJars[i], "jar", true );
+ }
+ }
+ else if ( "INVALID".equals( type ) )
+ {
+ for ( int i = 0; i < invalidArtifactChecksumJars.length; i++ )
+ {
+ writeChecksumFile( "checksumTest/", invalidArtifactChecksumJars[i], "jar", false );
+ }
+ }
+ }
+
+ /**
+ * Create checksum files for metadata.
+ *
+ * @param type The type of checksum to be created. (Valid or invalid)
+ */
+ protected void createMetadataFile( String type )
+ throws DigesterException, IOException
+ {
+ //loop through the valid artifact names..
+ if ( "VALID".equals( type ) )
+ {
+ writeMetadataFile( "checksumTest/validArtifact/1.0/", metadataChecksumFilename, "xml", true );
+ writeMetadataFile( "checksumTest/validArtifact/", metadataChecksumFilename, "xml", true );
+ writeMetadataFile( "checksumTest/", metadataChecksumFilename, "xml", true );
+ }
+ else if ( "INVALID".equals( type ) )
+ {
+ writeMetadataFile( "checksumTest/invalidArtifact/1.0/", metadataChecksumFilename, "xml", false );
+ }
+ }
+
+ /**
+ * Create artifact together with its checksums.
+ *
+ * @param relativePath The groupId
+ * @param filename The filename of the artifact to be created.
+ * @param type The file type (JAR)
+ * @param isValid Indicates whether the checksum to be created is valid or not.
+ */
+ private void writeChecksumFile( String relativePath, String filename, String type, boolean isValid )
+ throws IOException, DigesterException
+ {
+ //Initialize variables for creating jar files
+ String repoUrl = repository.getBasedir();
+
+ String dirs = filename.replace( '-', '/' );
+ //create the group level directory of the artifact
+ File dirFiles = new File( repoUrl + relativePath + dirs );
+
+ if ( dirFiles.mkdirs() )
+ {
+ // create a jar file
+ String path = repoUrl + relativePath + dirs + "/" + filename + "." + type;
+ FileOutputStream f = new FileOutputStream( path );
+ JarOutputStream out = new JarOutputStream( new BufferedOutputStream( f ) );
+
+ // jar sample.txt
+ String filename1 = repoUrl + relativePath + dirs + "/sample.txt";
+ createSampleFile( filename1 );
+
+ BufferedReader in = new BufferedReader( new FileReader( filename1 ) );
+ out.putNextEntry( new JarEntry( filename1 ) );
+ IOUtil.copy( in, out );
+ in.close();
+ out.close();
+
+ //Create md5 and sha-1 checksum files..
+
+ File file = new File( path + ".md5" );
+ OutputStream os = new FileOutputStream( file );
+ OutputStreamWriter osw = new OutputStreamWriter( os );
+ String sum = md5Digest.calc( new File( path ) );
+ if ( !isValid )
+ {
+ osw.write( sum + "1" );
+ }
+ else
+ {
+ osw.write( sum );
+ }
+ osw.close();
+
+ file = new File( path + ".sha1" );
+ os = new FileOutputStream( file );
+ osw = new OutputStreamWriter( os );
+ String sha1sum = sha1Digest.calc( new File( path ) );
+ if ( !isValid )
+ {
+ osw.write( sha1sum + "2" );
+ }
+ else
+ {
+ osw.write( sha1sum );
+ }
+ osw.close();
+ }
+ }
+
+ /**
+ * Create metadata file together with its checksums.
+ *
+ * @param relativePath The groupId
+ * @param filename The filename of the artifact to be created.
+ * @param type The file type (JAR)
+ * @param isValid Indicates whether the checksum to be created is valid or not.
+ */
+ private void writeMetadataFile( String relativePath, String filename, String type, boolean isValid )
+ throws IOException, DigesterException
+ {
+ //create checksum for the metadata file..
+ String repoUrl = repository.getBasedir();
+ String url = repository.getBasedir() + "/" + filename + "." + type;
+
+ String path = repoUrl + relativePath + filename + "." + type;
+ FileUtils.copyFile( new File( url ), new File( path ) );
+
+ //Create md5 and sha-1 checksum files..
+ File file = new File( path + ".md5" );
+ OutputStream os = new FileOutputStream( file );
+ OutputStreamWriter osw = new OutputStreamWriter( os );
+ String md5sum = md5Digest.calc( new File( path ) );
+ if ( !isValid )
+ {
+ osw.write( md5sum + "1" );
+ }
+ else
+ {
+ osw.write( md5sum );
+ }
+ osw.close();
+
+ file = new File( path + ".sha1" );
+ os = new FileOutputStream( file );
+ osw = new OutputStreamWriter( os );
+ String sha1sum = sha1Digest.calc( new File( path ) );
+ if ( !isValid )
+ {
+ osw.write( sha1sum + "2" );
+ }
+ else
+ {
+ osw.write( sha1sum );
+ }
+ osw.close();
+ }
+
+ /**
+ * Create the sample file that will be included in the jar.
+ *
+ * @param filename
+ */
+ private void createSampleFile( String filename )
+ throws IOException
+ {
+ File file = new File( filename );
+ OutputStream os = new FileOutputStream( file );
+ OutputStreamWriter osw = new OutputStreamWriter( os );
+ osw.write( "This is the content of the sample file that will be included in the jar file." );
+ osw.close();
+ }
+
+ /**
+ * Delete the test directory created in the repository.
+ *
+ * @param dir The directory to be deleted.
+ */
+ protected void deleteTestDirectory( File dir )
+ {
+ try
+ {
+ FileUtils.deleteDirectory( dir );
+ }
+ catch ( IOException e )
+ {
+ // ignore
+ }
+ }
+
+ private void deleteFile( String filename )
+ {
+ File f = new File( filename );
+ f.delete();
+ }
+
    /**
     * Delete the md5 and sha-1 checksum files created for the valid artifacts, plus the metadata
     * checksums stored under each valid artifact's directory.
     * <p/>
     * NOTE(review): createMetadataFile also writes metadata checksums directly under
     * "checksumTest/", and those are not removed here — confirm this is intentional.
     *
     * @param type the artifact file type whose checksums are deleted (e.g. "jar")
     */
    protected void deleteChecksumFiles( String type )
    {
        //delete valid checksum files of artifacts created
        for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
        {
            // the artifact lives under "checksumTest/<name with '-' replaced by '/'>/"
            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
                "/" + validArtifactChecksumJars[i] + "." + type + ".md5" );

            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
                "/" + validArtifactChecksumJars[i] + "." + type + ".sha1" );
        }

        //delete valid checksum files of metadata file
        for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
        {
            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
                "/" + metadataChecksumFilename + ".xml.md5" );

            deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
                "/" + metadataChecksumFilename + ".xml.sha1" );
        }
    }
+
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ *
+ */
+public abstract class AbstractRepositoryQueryLayerTestCase
+ extends PlexusTestCase
+{
+ private ArtifactFactory artifactFactory;
+
+ protected ArtifactRepository repository;
+
+ protected CachedRepositoryQueryLayer queryLayer;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+ File repositoryDirectory = getTestFile( "src/test/repository" );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+ ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ repository =
+ factory.createArtifactRepository( "test", repositoryDirectory.toURL().toString(), layout, null, null );
+ }
+
+ public void testContainsArtifactTrue()
+ {
+ Artifact artifact = getArtifact( "groupId", "artifactId", "1.0-alpha-1" );
+
+ assertTrue( "check artifact", queryLayer.containsArtifact( artifact ) );
+ }
+
+ public void testContainsArtifactFalse()
+ {
+ Artifact artifact = getArtifact( "groupId", "artifactId", "1.0-beta-1" );
+
+ assertFalse( "check non-existent artifact", queryLayer.containsArtifact( artifact ) );
+ }
+
+ public void testContainsSnapshotArtifactTrue()
+ {
+ Snapshot snapshot = new Snapshot();
+ snapshot.setTimestamp( "20050611.202024" );
+ snapshot.setBuildNumber( 1 );
+
+ Artifact artifact = getArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT" );
+ assertTrue( "check for snapshot artifact", queryLayer.containsArtifact( artifact, snapshot ) );
+ }
+
+ public void testContainsSnapshotArtifactFalse()
+ {
+ Snapshot snapshot = new Snapshot();
+ snapshot.setTimestamp( "20050611.202024" );
+ snapshot.setBuildNumber( 2 );
+
+ Artifact artifact = getArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT" );
+ assertFalse( "check for non-existent snapshot artifact", queryLayer.containsArtifact( artifact, snapshot ) );
+ }
+
+ public void testArtifactVersionsTrue()
+ throws Exception
+ {
+ Artifact artifact = getArtifact( "groupId", "artifactId", "ignored" );
+
+ List versions = queryLayer.getVersions( artifact );
+
+ assertTrue( "check version 1.0-alpha-1", versions.contains( "1.0-alpha-1" ) );
+ assertTrue( "check version 1.0-alpha-2", versions.contains( "1.0-alpha-2" ) );
+ assertFalse( "check version 1.0-alpha-3", versions.contains( "1.0-alpha-3" ) );
+ }
+
+ public void testArtifactVersionsFalse()
+ throws Exception
+ {
+ Artifact artifact = getArtifact( "groupId", "artifactId", "ignored" );
+
+ List versions = queryLayer.getVersions( artifact );
+
+ assertTrue( "check version 1.0-alpha-1", versions.contains( "1.0-alpha-1" ) );
+ assertTrue( "check version 1.0-alpha-2", versions.contains( "1.0-alpha-2" ) );
+ assertFalse( "check version 1.0-alpha-3", versions.contains( "1.0-alpha-3" ) );
+ }
+
+ public void testArtifactVersionsError()
+ {
+ Artifact artifact = getArtifact( "groupId", "none", "ignored" );
+
+ try
+ {
+ queryLayer.getVersions( artifact );
+ fail( "expected error not thrown" );
+ }
+ catch ( RepositoryQueryLayerException e )
+ {
+ //expected
+ }
+ }
+
+ private Artifact getArtifact( String groupId, String artifactId, String version )
+ {
+ return artifactFactory.createBuildArtifact( groupId, artifactId, version, "pom" );
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ release( artifactFactory );
+ super.tearDown();
+ artifactFactory = null;
+ repository = null;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
+import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+
+/**
+ *
+ */
+public abstract class AbstractRepositoryReportsTestCase
+ extends PlexusTestCase
+{
+ /**
+ * This should only be used for the few that can't use the query layer.
+ */
+ protected ArtifactRepository repository;
+
+ protected static final String remoteRepoUrl = "http://public.planetmirror.com/pub/maven2/";
+
+ protected static final String remoteArtifactGroup = "HTTPClient";
+
+ protected static final String remoteArtifactId = "HTTPClient";
+
+ protected static final String remoteArtifactVersion = "0.3-3";
+
+ protected static final String remoteArtifactScope = "compile";
+
+ protected static final String remoteArtifactType = "jar";
+
+ protected static final String remoteRepoId = "remote-repo";
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+ File repositoryDirectory = getTestFile( "src/test/repository" );
+
+ ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
+ ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
+
+ repository = factory.createArtifactRepository( "repository", repositoryDirectory.toURL().toString(), layout,
+ null, null );
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.model.Dependency;
+import org.apache.maven.model.Model;
+
+import java.util.Iterator;
+
+/**
+ *
+ */
+public class ArtifactReportProcessorTest
+ extends AbstractRepositoryReportsTestCase
+{
+ private static final String EMPTY_STRING = "";
+
+ private static final String VALID = "temp";
+
+ private MockArtifactReporter reporter;
+
+ private Artifact artifact;
+
+ private Model model;
+
+ private DefaultArtifactReportProcessor processor;
+
+ private static final boolean ARTIFACT_FOUND = true;
+
+ private static final boolean ARTIFACT_NOT_FOUND = false;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+ reporter = new MockArtifactReporter();
+ artifact = new MockArtifact();
+ model = new Model();
+ processor = new DefaultArtifactReportProcessor();
+ }
+
+ public void testNullArtifact()
+ {
+ processor.processArtifact( model, null, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.NULL_ARTIFACT, result.getReason() );
+ }
+
+ public void testNoProjectDescriptor()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ processor.processArtifact( null, artifact, reporter, null );
+ assertEquals( 1, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.NULL_MODEL, result.getReason() );
+ }
+
+ public void testArtifactFoundButNoDirectDependencies()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 1, reporter.getSuccesses() );
+ assertEquals( 0, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ }
+
+ public void testArtifactNotFound()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_NOT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.ARTIFACT_NOT_FOUND, result.getReason() );
+ }
+
+ public void testValidArtifactWithNullDependency()
+ {
+ MockArtifactFactory artifactFactory = new MockArtifactFactory();
+ processor.setArtifactFactory( artifactFactory );
+
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ Dependency dependency = new Dependency();
+ setRequiredElements( dependency, VALID, VALID, VALID );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ processor.setRepositoryQueryLayer( queryLayer );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 2, reporter.getSuccesses() );
+ assertEquals( 0, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ }
+
+ public void testValidArtifactWithValidSingleDependency()
+ {
+ MockArtifactFactory artifactFactory = new MockArtifactFactory();
+ processor.setArtifactFactory( artifactFactory );
+
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ Dependency dependency = new Dependency();
+ setRequiredElements( dependency, VALID, VALID, VALID );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ processor.setRepositoryQueryLayer( queryLayer );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 2, reporter.getSuccesses() );
+ assertEquals( 0, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ }
+
+ public void testValidArtifactWithValidMultipleDependencies()
+ {
+ MockArtifactFactory artifactFactory = new MockArtifactFactory();
+ processor.setArtifactFactory( artifactFactory );
+
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ Dependency dependency = new Dependency();
+ setRequiredElements( dependency, VALID, VALID, VALID );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ processor.setRepositoryQueryLayer( queryLayer );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 6, reporter.getSuccesses() );
+ assertEquals( 0, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ }
+
+ public void testValidArtifactWithAnInvalidDependency()
+ {
+ MockArtifactFactory artifactFactory = new MockArtifactFactory();
+ processor.setArtifactFactory( artifactFactory );
+
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ Dependency dependency = new Dependency();
+ setRequiredElements( dependency, VALID, VALID, VALID );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_NOT_FOUND );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ processor.setRepositoryQueryLayer( queryLayer );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 5, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.DEPENDENCY_NOT_FOUND, result.getReason() );
+ }
+
+ public void testEmptyGroupId()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+
+ setRequiredElements( artifact, EMPTY_STRING, VALID, VALID );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_GROUP_ID, result.getReason() );
+ }
+
+ public void testEmptyArtifactId()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+
+ setRequiredElements( artifact, VALID, EMPTY_STRING, VALID );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_ARTIFACT_ID, result.getReason() );
+ }
+
+ public void testEmptyVersion()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+
+ setRequiredElements( artifact, VALID, VALID, EMPTY_STRING );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_VERSION, result.getReason() );
+ }
+
+ public void testNullGroupId()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+
+ setRequiredElements( artifact, null, VALID, VALID );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_GROUP_ID, result.getReason() );
+ }
+
+ public void testNullArtifactId()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+
+ setRequiredElements( artifact, VALID, null, VALID );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_ARTIFACT_ID, result.getReason() );
+ }
+
+ public void testNullVersion()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+
+ setRequiredElements( artifact, VALID, VALID, null );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_VERSION, result.getReason() );
+ }
+
+ public void testMultipleFailures()
+ {
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+ processor.setRepositoryQueryLayer( queryLayer );
+
+ setRequiredElements( artifact, null, null, null );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 3, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_GROUP_ID, result.getReason() );
+ result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_ARTIFACT_ID, result.getReason() );
+ result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_VERSION, result.getReason() );
+ }
+
+ public void testValidArtifactWithInvalidDependencyGroupId()
+ {
+ MockArtifactFactory artifactFactory = new MockArtifactFactory();
+ processor.setArtifactFactory( artifactFactory );
+
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ Dependency dependency = new Dependency();
+ setRequiredElements( dependency, null, VALID, VALID );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ processor.setRepositoryQueryLayer( queryLayer );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 1, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_GROUP_ID, result.getReason() );
+ }
+
+ public void testValidArtifactWithInvalidDependencyArtifactId()
+ {
+ MockArtifactFactory artifactFactory = new MockArtifactFactory();
+ processor.setArtifactFactory( artifactFactory );
+
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ Dependency dependency = new Dependency();
+ setRequiredElements( dependency, VALID, null, VALID );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ processor.setRepositoryQueryLayer( queryLayer );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 1, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_ARTIFACT_ID, result.getReason() );
+ }
+
+ public void testValidArtifactWithInvalidDependencyVersion()
+ {
+ MockArtifactFactory artifactFactory = new MockArtifactFactory();
+ processor.setArtifactFactory( artifactFactory );
+
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ Dependency dependency = new Dependency();
+ setRequiredElements( dependency, VALID, VALID, null );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ processor.setRepositoryQueryLayer( queryLayer );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 1, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_VERSION, result.getReason() );
+ }
+
+ public void testValidArtifactWithInvalidDependencyRequiredElements()
+ {
+ MockArtifactFactory artifactFactory = new MockArtifactFactory();
+ processor.setArtifactFactory( artifactFactory );
+
+ setRequiredElements( artifact, VALID, VALID, VALID );
+ MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ Dependency dependency = new Dependency();
+ setRequiredElements( dependency, null, null, null );
+ model.addDependency( dependency );
+ queryLayer.addReturnValue( ARTIFACT_FOUND );
+
+ processor.setRepositoryQueryLayer( queryLayer );
+ processor.processArtifact( model, artifact, reporter, null );
+ assertEquals( 1, reporter.getSuccesses() );
+ assertEquals( 3, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+
+ Iterator failures = reporter.getArtifactFailureIterator();
+ ArtifactResult result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_GROUP_ID, result.getReason() );
+ result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_ARTIFACT_ID, result.getReason() );
+ result = (ArtifactResult) failures.next();
+ assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_VERSION, result.getReason() );
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ model = null;
+ artifact = null;
+ reporter = null;
+ super.tearDown();
+ }
+
+ private void setRequiredElements( Artifact artifact, String groupId, String artifactId, String version )
+ {
+ artifact.setGroupId( groupId );
+ artifact.setArtifactId( artifactId );
+ artifact.setVersion( version );
+ }
+
+ private void setRequiredElements( Dependency dependency, String groupId, String artifactId, String version )
+ {
+ dependency.setGroupId( groupId );
+ dependency.setArtifactId( artifactId );
+ dependency.setVersion( version );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+import org.apache.maven.model.Model;
+
+import java.util.Iterator;
+
+/**
+ *
+ */
+public class ArtifactReporterTest
+ extends AbstractRepositoryReportsTestCase
+{
+ private ArtifactReporter reporter;
+
+ private Artifact artifact;
+
+ private MockArtifactReportProcessor processor;
+
+ private Model model;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+ reporter = new DefaultArtifactReporter();
+ ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+ artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+ processor = new MockArtifactReportProcessor();
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.setLastUpdated( "20050611.202020" );
+ model = new Model();
+ }
+
+ public void testArtifactReporterSingleSuccess()
+ {
+ processor.addReturnValue( ReportCondition.SUCCESS, artifact, "all is good" );
+ processor.processArtifact( model, artifact, reporter, null );
+ Iterator success = reporter.getArtifactSuccessIterator();
+ assertTrue( success.hasNext() );
+ assertEquals( 1, reporter.getSuccesses() );
+ Artifact result = ( (ArtifactResult) success.next() ).getArtifact();
+ assertEquals( "groupId", result.getGroupId() );
+ assertEquals( "artifactId", result.getArtifactId() );
+ assertEquals( "1.0-alpha-1", result.getVersion() );
+ assertFalse( success.hasNext() );
+ }
+
+ public void testArtifactReporterMultipleSuccess()
+ {
+ processor.clearList();
+ processor.addReturnValue( ReportCondition.SUCCESS, artifact, "one" );
+ processor.addReturnValue( ReportCondition.SUCCESS, artifact, "two" );
+ processor.addReturnValue( ReportCondition.SUCCESS, artifact, "three" );
+ reporter = new DefaultArtifactReporter();
+ processor.processArtifact( model, artifact, reporter, null );
+ Iterator success = reporter.getArtifactSuccessIterator();
+ assertTrue( success.hasNext() );
+ int i;
+ for ( i = 0; success.hasNext(); i++ )
+ {
+ success.next();
+ }
+ assertEquals( 3, i );
+ assertEquals( 3, reporter.getSuccesses() );
+ assertEquals( 0, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ }
+
+ /**
+ * A single FAILURE result should be reported as exactly one failure entry,
+ * with no successes or warnings recorded.
+ */
+ public void testArtifactReporterSingleFailure()
+ {
+ processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed once" );
+ processor.processArtifact( model, artifact, reporter, null );
+ Iterator failures = reporter.getArtifactFailureIterator();
+ assertTrue( failures.hasNext() );
+ failures.next();
+ assertFalse( failures.hasNext() );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 1, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ }
+
+ public void testArtifactReporterMultipleFailure()
+ {
+ processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed once" );
+ processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed twice" );
+ processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed thrice" );
+ processor.processArtifact( model, artifact, reporter, null );
+ Iterator failure = reporter.getArtifactFailureIterator();
+ assertTrue( failure.hasNext() );
+ int i;
+ for ( i = 0; failure.hasNext(); i++ )
+ {
+ failure.next();
+ }
+ assertEquals( 3, i );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 3, reporter.getFailures() );
+ assertEquals( 0, reporter.getWarnings() );
+ }
+
+ /**
+ * Failure messages must come back in the order the results were
+ * registered, and no additional failure entries may be present.
+ */
+ public void testFailureMessages()
+ {
+ processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed once" );
+ processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed twice" );
+ processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed thrice" );
+ processor.processArtifact( model, artifact, reporter, null );
+ Iterator failure = reporter.getArtifactFailureIterator();
+ assertEquals( "failed once", ( (ArtifactResult) failure.next() ).getReason() );
+ assertEquals( "failed twice", ( (ArtifactResult) failure.next() ).getReason() );
+ assertEquals( "failed thrice", ( (ArtifactResult) failure.next() ).getReason() );
+ // Previously unchecked: extra unexpected failures would have passed silently.
+ assertFalse( failure.hasNext() );
+ }
+
+ /**
+ * A single WARNING result should be reported as exactly one warning entry,
+ * with no successes or failures recorded.
+ */
+ public void testArtifactReporterSingleWarning()
+ {
+ processor.addReturnValue( ReportCondition.WARNING, artifact, "you've been warned" );
+ processor.processArtifact( model, artifact, reporter, null );
+ Iterator warnings = reporter.getArtifactWarningIterator();
+ assertTrue( warnings.hasNext() );
+ warnings.next();
+ assertFalse( warnings.hasNext() );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 0, reporter.getFailures() );
+ assertEquals( 1, reporter.getWarnings() );
+ }
+
+ public void testArtifactReporterMultipleWarning()
+ {
+ processor.addReturnValue( ReportCondition.WARNING, artifact, "i'm warning you" );
+ processor.addReturnValue( ReportCondition.WARNING, artifact, "you have to stop now" );
+ processor.addReturnValue( ReportCondition.WARNING, artifact, "all right... that does it!" );
+ processor.processArtifact( model, artifact, reporter, null );
+ Iterator warning = reporter.getArtifactWarningIterator();
+ assertTrue( warning.hasNext() );
+ int i;
+ for ( i = 0; warning.hasNext(); i++ )
+ {
+ warning.next();
+ }
+ assertEquals( 3, i );
+ assertEquals( 0, reporter.getSuccesses() );
+ assertEquals( 0, reporter.getFailures() );
+ assertEquals( 3, reporter.getWarnings() );
+ }
+
+ /**
+ * Warning messages must come back in the order the results were
+ * registered, and no additional warning entries may be present.
+ */
+ public void testWarningMessages()
+ {
+ processor.addReturnValue( ReportCondition.WARNING, artifact, "i'm warning you" );
+ processor.addReturnValue( ReportCondition.WARNING, artifact, "you have to stop now" );
+ processor.addReturnValue( ReportCondition.WARNING, artifact, "all right... that does it!" );
+ processor.processArtifact( model, artifact, reporter, null );
+ Iterator warning = reporter.getArtifactWarningIterator();
+ assertEquals( "i'm warning you", ( (ArtifactResult) warning.next() ).getReason() );
+ assertEquals( "you have to stop now", ( (ArtifactResult) warning.next() ).getReason() );
+ assertEquals( "all right... that does it!", ( (ArtifactResult) warning.next() ).getReason() );
+ // Previously unchecked: extra unexpected warnings would have passed silently.
+ assertFalse( warning.hasNext() );
+ }
+
+ /**
+ * Releases the test fixtures so each test starts from a clean slate,
+ * then delegates to the superclass teardown.
+ */
+ protected void tearDown()
+ throws Exception
+ {
+ model = null;
+ // Clear the mock's queued return values before dropping the reference.
+ processor.clearList();
+ processor = null;
+ reporter = null;
+ super.tearDown();
+ }
+
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Plugin;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+
+import java.util.Iterator;
+
+/**
+ * @todo Should this use MetadataXpp3Reader instead?
+ */
+public class BadMetadataReportProcessorTest
+ extends AbstractRepositoryReportsTestCase
+{
+ // Factory used to build the artifacts whose metadata is being validated.
+ private ArtifactFactory artifactFactory;
+
+ // Processor under test, resolved from the container in setUp().
+ private MetadataReportProcessor badMetadataReportProcessor;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+
+ badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE );
+ }
+
+ /**
+ * Metadata without a lastUpdated element must be reported as a failure.
+ */
+ public void testMetadataMissingLastUpdated()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ // Versioning deliberately has no lastUpdated set.
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.addVersion( "1.0-alpha-2" );
+
+ RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ assertEquals( "check metadata", metadata, result.getMetadata() );
+ assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * Metadata whose version list matches the repository fixture produces no failures.
+ */
+ public void testMetadataValidVersions()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.addVersion( "1.0-alpha-2" );
+ versioning.setLastUpdated( "20050611.202020" );
+
+ RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertFalse( "check there are no failures", failures.hasNext() );
+ }
+
+ /**
+ * A version present in the repository but absent from the metadata must be
+ * reported. (The test repository fixture evidently contains 1.0-alpha-2 for
+ * this artifact -- TODO confirm against the test resources.)
+ */
+ public void testMetadataMissingADirectory()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.setLastUpdated( "20050611.202020" );
+
+ RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ assertEquals( "check metadata", metadata, result.getMetadata() );
+ // TODO: should be more robust
+ assertEquals( "check reason",
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
+ result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * A version listed in the metadata but not present in the repository must
+ * be reported.
+ */
+ public void testMetadataInvalidArtifactVersion()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.addVersion( "1.0-alpha-2" );
+ // 1.0-alpha-3 is not expected to exist in the repository fixture.
+ versioning.addVersion( "1.0-alpha-3" );
+ versioning.setLastUpdated( "20050611.202020" );
+
+ RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ assertEquals( "check metadata", metadata, result.getMetadata() );
+ // TODO: should be more robust
+ assertEquals( "check reason",
+ "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
+ result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * Both error kinds at once: a phantom metadata version (1.0-alpha-3) and a
+ * repository version missing from the metadata (1.0-alpha-2) -- two failures.
+ */
+ public void testMoreThanOneMetadataVersionErrors()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.addVersion( "1.0-alpha-3" );
+ versioning.setLastUpdated( "20050611.202020" );
+
+ RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ assertEquals( "check metadata", metadata, result.getMetadata() );
+ // TODO: should be more robust
+ assertEquals( "check reason",
+ "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
+ result.getReason() );
+ assertTrue( "check there is a 2nd failure", failures.hasNext() );
+ result = (RepositoryMetadataResult) failures.next();
+ // TODO: should be more robust
+ assertEquals( "check reason",
+ "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
+ result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * Group metadata listing exactly the plugins present in the repository
+ * fixture produces no failures.
+ */
+ public void testValidPluginMetadata()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertFalse( "check there are no failures", failures.hasNext() );
+ }
+
+ /**
+ * A plugin declared in group metadata but absent from the repository must
+ * be reported as a failure.
+ */
+ public void testMissingMetadataPlugin()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "missing-plugin", "default3" ) );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ // TODO: should be more robust
+ assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository",
+ result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * A plugin present in the repository but not declared in the group
+ * metadata must be reported as a failure.
+ */
+ public void testIncompletePluginMetadata()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ // TODO: should be more robust
+ assertEquals( "check reason",
+ "Plugin snapshot-artifact is present in the repository but " + "missing in the metadata.",
+ result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * Null and empty plugin artifactIds each produce a distinct failure.
+ */
+ public void testInvalidPluginArtifactId()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( null, "default3" ) );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "", "default4" ) );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ // TODO: should be more robust
+ assertEquals( "check reason", "Missing or empty artifactId in group metadata.", result.getReason() );
+ assertTrue( "check there is a 2nd failure", failures.hasNext() );
+ result = (RepositoryMetadataResult) failures.next();
+ // TODO: should be more robust
+ assertEquals( "check reason", "Missing or empty artifactId in group metadata.", result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * Null and empty plugin prefixes each produce a distinct failure naming
+ * the offending artifactId.
+ */
+ public void testInvalidPluginPrefix()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "" ) );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ // TODO: should be more robust
+ assertEquals( "check reason", "Missing or empty plugin prefix for artifactId artifactId.", result.getReason() );
+ assertTrue( "check there is a 2nd failure", failures.hasNext() );
+ result = (RepositoryMetadataResult) failures.next();
+ // TODO: should be more robust
+ assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.",
+ result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * Two plugins sharing the same prefix must be reported as a single
+ * duplicate-prefix failure.
+ */
+ public void testDuplicatePluginPrefixes()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
+ metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default" ) );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ // TODO: should be more robust
+ assertEquals( "check reason", "Duplicate plugin prefix found: default.", result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * Snapshot metadata whose timestamp/build number matches the repository
+ * fixture produces no failures.
+ */
+ public void testValidSnapshotMetadata()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ Artifact artifact =
+ artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+
+ Snapshot snapshot = new Snapshot();
+ snapshot.setBuildNumber( 1 );
+ snapshot.setTimestamp( "20050611.202024" );
+
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertFalse( "check there are no failures", failures.hasNext() );
+ }
+
+ /**
+ * Snapshot metadata referring to a build number that does not exist in the
+ * repository must be reported as a failure.
+ */
+ public void testInvalidSnapshotMetadata()
+ throws ReportProcessorException
+ {
+ ArtifactReporter reporter = new MockArtifactReporter();
+
+ Artifact artifact =
+ artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
+
+ Snapshot snapshot = new Snapshot();
+ // Build number 2 is not expected to exist in the repository fixture.
+ snapshot.setBuildNumber( 2 );
+ snapshot.setTimestamp( "20050611.202024" );
+
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
+
+ badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator failures = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check there is a failure", failures.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
+ assertEquals( "check metadata", metadata, result.getMetadata() );
+ // TODO: should be more robust
+ assertEquals( "check reason", "Snapshot artifact 20050611.202024-2 does not exist.", result.getReason() );
+ assertFalse( "check no more failures", failures.hasNext() );
+ }
+
+ /**
+ * Builds a metadata Plugin entry; name is set to the artifactId for
+ * convenience. Null/empty arguments are passed through unchanged so tests
+ * can exercise the validation paths.
+ */
+ private Plugin createMetadataPlugin( String artifactId, String prefix )
+ {
+ Plugin plugin = new Plugin();
+ plugin.setArtifactId( artifactId );
+ plugin.setName( artifactId );
+ plugin.setPrefix( prefix );
+ return plugin;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import junit.framework.TestCase;
+
+/**
+ * Tests the {@link Cache} eviction behaviour driven by hit ratio and maximum size.
+ */
+public class CacheTest
+ extends TestCase
+{
+ // Cache under test; recreated by each test method with different limits.
+ private Cache cache;
+
+ // Hit ratio passed to the Cache constructor; assumed to be the eviction
+ // trigger threshold -- TODO confirm against the Cache implementation.
+ private static final double CACHE_HIT_RATIO = 0.5;
+
+ // Target hit rate the tests drive the cache towards before forcing eviction.
+ private static final double CACHE_HIT_RATIO_THRESHOLD = 0.75;
+
+ /**
+ * With only a hit-ratio limit, adding an entry once the hit rate exceeds
+ * the configured ratio should evict the oldest entry.
+ */
+ public void testCacheManagementBasedOnHitsRatio()
+ {
+ cache = new Cache( CACHE_HIT_RATIO );
+ newCacheObjectTests();
+
+ String key = "key";
+ String value = "value";
+ for ( int ctr = 1; ctr < 10; ctr++ )
+ {
+ cache.put( key + ctr, value + ctr );
+ }
+
+ // Repeatedly hit an existing entry to push the hit rate over the threshold.
+ while ( cache.getHitRate() < CACHE_HIT_RATIO_THRESHOLD )
+ {
+ cache.get( "key2" );
+ }
+ cache.put( "key10", "value10" );
+ assertNull( "first key must be expired", cache.get( "key1" ) );
+ }
+
+ /**
+ * With only a size limit, exceeding the maximum size should evict the
+ * oldest entry and keep the cache at its maximum size.
+ */
+ public void testCacheManagementBasedOnCacheSize()
+ {
+ cache = new Cache( 9 );
+ newCacheObjectTests();
+
+ String key = "key";
+ String value = "value";
+ for ( int ctr = 1; ctr < 10; ctr++ )
+ {
+ cache.put( key + ctr, value + ctr );
+ }
+
+ cache.put( "key10", "value10" );
+ assertNull( "first key must be expired", cache.get( "key1" ) );
+ assertEquals( "check cache size to be max size", 9, cache.size() );
+ }
+
+ /**
+ * With both limits configured, eviction should occur when either the hit
+ * ratio or the maximum size condition is met.
+ */
+ public void testCacheManagementBasedOnCacheSizeAndHitRate()
+ {
+ cache = new Cache( CACHE_HIT_RATIO, 9 );
+ newCacheObjectTests();
+
+ String key = "key";
+ String value = "value";
+ for ( int ctr = 1; ctr < 5; ctr++ )
+ {
+ cache.put( key + ctr, value + ctr );
+ }
+
+ // Raise the hit rate above the ratio, then trigger a ratio-based eviction.
+ while ( cache.getHitRate() < CACHE_HIT_RATIO )
+ {
+ cache.get( "key3" );
+ }
+
+ cache.put( "key10", "value10" );
+ assertNull( "first key must be expired", cache.get( "key1" ) );
+
+ // Drive the hit rate back below the ratio with guaranteed misses.
+ while ( cache.getHitRate() >= CACHE_HIT_RATIO )
+ {
+ cache.get( "key11" );
+ }
+
+ for ( int ctr = 5; ctr < 10; ctr++ )
+ {
+ cache.put( key + ctr, value + ctr );
+ }
+
+ // Now the size limit, not the hit rate, should force the eviction.
+ cache.put( "key11", "value11" );
+ assertNull( "second key must be expired", cache.get( "key2" ) );
+ assertEquals( "check cache size to be max size", 9, cache.size() );
+ }
+
+ /**
+ * Re-putting an existing key must not grow the cache; a subsequent new key
+ * still evicts the oldest distinct entry.
+ */
+ public void testCacheOnRedundantData()
+ {
+ cache = new Cache( CACHE_HIT_RATIO, 9 );
+ newCacheObjectTests();
+
+ String key = "key";
+ String value = "value";
+ for ( int ctr = 1; ctr < 10; ctr++ )
+ {
+ cache.put( key + ctr, value + ctr );
+ }
+
+ cache.put( "key1", "value1" );
+ cache.put( "key10", "value10" );
+ assertNull( "second key must be gone", cache.get( "key2" ) );
+ assertEquals( "check cache size to be max size", 9, cache.size() );
+ }
+
+ /**
+ * Sanity checks on a freshly constructed cache: zero hit rate and size,
+ * hit/miss accounting, and clear() resetting all statistics. Leaves the
+ * cache empty with a zeroed hit rate for the calling test.
+ */
+ private void newCacheObjectTests()
+ {
+ assertEquals( (double) 0, cache.getHitRate(), 0 );
+ assertEquals( "check cache size", 0, cache.size() );
+
+ String value = "value";
+ String key = "key";
+
+ cache.put( key, value );
+ assertEquals( "check cache hit", value, cache.get( key ) );
+ assertEquals( (double) 1, cache.getHitRate(), 0 );
+ assertEquals( "check cache size", 1, cache.size() );
+ assertNull( "check cache miss", cache.get( "none" ) );
+ // One hit and one miss: hit rate equals 0.5, matching CACHE_HIT_RATIO.
+ assertEquals( CACHE_HIT_RATIO, cache.getHitRate(), 0 );
+ cache.clear();
+ assertNull( "check flushed object", cache.get( "key" ) );
+ assertEquals( (double) 0, cache.getHitRate(), 0 );
+ assertEquals( "check flushed cache size", 0, cache.size() );
+ cache.clear();
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Runs the inherited repository query-layer tests against the caching
+ * implementation and verifies that repeated queries are served from the cache.
+ */
+public class CachedRepositoryQueryLayerTest
+ extends AbstractRepositoryQueryLayerTestCase
+{
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ // Substitute the caching query layer so the inherited tests exercise it.
+ queryLayer = new CachedRepositoryQueryLayer( repository );
+ }
+
+ /**
+ * Repeating the same artifact-existence query should be answered from the
+ * cache the second time, raising the hit rate from 0 to the expected ratio.
+ */
+ public void testUseFileCache()
+ {
+ testContainsArtifactTrue();
+ assertEquals( 0, queryLayer.getCacheHitRate(), 0 );
+ testContainsArtifactTrue();
+ assertEquals( CachedRepositoryQueryLayer.CACHE_HIT_RATIO, queryLayer.getCacheHitRate(), 0 );
+ }
+
+ /**
+ * Repeating the same metadata/version query should likewise be served from
+ * the cache on the second invocation.
+ */
+ public void testUseMetadataCache()
+ throws Exception
+ {
+ testArtifactVersionsTrue();
+ assertEquals( 0, queryLayer.getCacheHitRate(), 0 );
+ testArtifactVersionsTrue();
+ assertEquals( CachedRepositoryQueryLayer.CACHE_HIT_RATIO, queryLayer.getCacheHitRate(), 0 );
+ }
+
+ /**
+ * Snapshot artifact lookups should also be cached on repetition.
+ */
+ public void testUseFileCacheOnSnapshot()
+ {
+ testContainsSnapshotArtifactTrue();
+ assertEquals( 0, queryLayer.getCacheHitRate(), 0 );
+ testContainsSnapshotArtifactTrue();
+ assertEquals( CachedRepositoryQueryLayer.CACHE_HIT_RATIO, queryLayer.getCacheHitRate(), 0 );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.digest.DigesterException;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.handler.ArtifactHandler;
+import org.apache.maven.artifact.handler.DefaultArtifactHandler;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * This class tests the ChecksumArtifactReporter.
+ * It extends the AbstractChecksumArtifactReporterTestCase class.
+ */
+public class ChecksumArtifactReporterTest
+ extends AbstractChecksumArtifactReporterTestCase
+{
+ // Artifact-level checksum processor, looked up by role hint "checksum".
+ private ArtifactReportProcessor artifactReportProcessor;
+
+ // Accumulates success/failure counts across a single test method.
+ private ArtifactReporter reporter = new MockArtifactReporter();
+
+ // Metadata-level checksum processor, role hint "checksum-metadata".
+ private MetadataReportProcessor metadataReportProcessor;
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+ artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
+ metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
+ }
+
+ /**
+ * Test the ChecksumArtifactReporter when the checksum files are valid.
+ * Expects two successes (presumably one per digest algorithm, e.g. MD5 and
+ * SHA-1 -- TODO confirm against the base test case).
+ */
+ public void testChecksumArtifactReporterSuccess()
+ throws ReportProcessorException, IOException, DigesterException
+ {
+ createChecksumFile( "VALID" );
+ createChecksumFile( "INVALID" );
+
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "1.0" );
+ Artifact artifact =
+ new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
+
+ artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ assertEquals( 2, reporter.getSuccesses() );
+ }
+
+ /**
+ * Test the ChecksumArtifactReporter when the checksum files are invalid.
+ */
+ public void testChecksumArtifactReporterFailed()
+ throws ReportProcessorException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "1.0" );
+ Artifact artifact =
+ new DefaultArtifact( "checksumTest", "invalidArtifact", version, "compile", "jar", "", handler );
+
+ artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ assertEquals( 2, reporter.getFailures() );
+ }
+
+ /**
+ * Test the valid checksum of a metadata file.
+ * The reporter should report 2 success validation.
+ */
+ public void testChecksumMetadataReporterSuccess()
+ throws ReportProcessorException, DigesterException, IOException
+ {
+ createMetadataFile( "VALID" );
+ createMetadataFile( "INVALID" );
+
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "1.0" );
+ Artifact artifact =
+ new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
+
+ //Version level metadata
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ metadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ //Artifact level metadata
+ metadata = new ArtifactRepositoryMetadata( artifact );
+ metadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ //Group level metadata
+ metadata = new GroupRepositoryMetadata( "checksumTest" );
+ metadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator iter = reporter.getRepositoryMetadataSuccessIterator();
+ assertTrue( "check if there is a success", iter.hasNext() );
+ }
+
+ /**
+ * Test the corrupted checksum of a metadata file.
+ * The reporter must report 2 failures.
+ */
+ public void testChecksumMetadataReporterFailure()
+ throws ReportProcessorException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "1.0" );
+ Artifact artifact =
+ new DefaultArtifact( "checksumTest", "invalidArtifact", version, "compile", "jar", "", handler );
+
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ metadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator iter = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check if there is a failure", iter.hasNext() );
+ }
+
+ /**
+ * Test the checksum of an artifact located in a remote location.
+ * NOTE(review): disabled -- depends on remote repository access; consider
+ * deleting or converting to an integration test rather than keeping it
+ * commented out.
+ */
+ /* public void testChecksumArtifactRemote()
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( remoteArtifactType );
+ VersionRange version = VersionRange.createFromVersion( remoteArtifactVersion );
+ Artifact artifact = new DefaultArtifact( remoteArtifactGroup, remoteArtifactId, version, remoteArtifactScope,
+ remoteArtifactType, "", handler );
+ ArtifactRepository repository = new DefaultArtifactRepository( remoteRepoId, remoteRepoUrl,
+ new DefaultRepositoryLayout() );
+
+ artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ if ( reporter.getFailures() == 2 )
+ assertTrue( reporter.getFailures() == 2 );
+
+ if ( reporter.getSuccesses() == 2 )
+ assertTrue( reporter.getSuccesses() == 2 );
+
+ }
+ */
+
+ /**
+ * Test the checksum of a metadata file located in a remote location.
+ * NOTE(review): disabled for the same reason as testChecksumArtifactRemote.
+ */
+ /* public void testChecksumMetadataRemote()
+ {
+
+ try
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( remoteArtifactType );
+ VersionRange version = VersionRange.createFromVersion( remoteArtifactVersion );
+ Artifact artifact = new DefaultArtifact( remoteArtifactGroup, remoteArtifactId, version,
+ remoteArtifactScope, remoteArtifactType, "", handler );
+ ArtifactRepository repository = new DefaultArtifactRepository( remoteRepoId, remoteRepoUrl,
+ new DefaultRepositoryLayout() );
+
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+
+ metadataReportProcessor.processMetadata( metadata, repository, reporter );
+ Iterator iter = reporter.getRepositoryMetadataFailureIterator();
+ if ( iter.hasNext() )
+ assertTrue( "check if there is a failure", iter.hasNext() );
+
+ iter = reporter.getRepositoryMetadataSuccessIterator();
+ if ( iter.hasNext() )
+ assertTrue( "check if there is a success", iter.hasNext() );
+
+ }
+ catch ( Exception e )
+ {
+ e.printStackTrace();
+ }
+ }
+ */
+
+ /**
+ * Test the conditional when the checksum files of the artifact & metadata do not exist.
+ */
+ public void testChecksumFilesDoNotExist()
+ throws ReportProcessorException, DigesterException, IOException
+ {
+ createChecksumFile( "VALID" );
+ createMetadataFile( "VALID" );
+ // Remove the checksum files so the processors see them as missing.
+ deleteChecksumFiles( "jar" );
+
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "1.0" );
+ Artifact artifact =
+ new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
+
+ artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ assertEquals( 2, reporter.getFailures() );
+
+ RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
+ metadataReportProcessor.processMetadata( metadata, repository, reporter );
+
+ Iterator iter = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "check if there is a failure", iter.hasNext() );
+
+ deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+import org.apache.maven.artifact.repository.metadata.Versioning;
+
+import java.util.Iterator;
+
+/**
+ *
+ */
+public class DefaultArtifactReporterTest
+ extends AbstractRepositoryReportsTestCase
+{
+ private ArtifactReporter reporter;
+
+ private Artifact artifact;
+
+ private RepositoryMetadata metadata;
+
+ public void testEmptyArtifactReporter()
+ {
+ assertEquals( "No failures", 0, reporter.getFailures() );
+ assertEquals( "No warnings", 0, reporter.getWarnings() );
+ assertEquals( "No successes", 0, reporter.getSuccesses() );
+ assertFalse( "No artifact failures", reporter.getArtifactFailureIterator().hasNext() );
+ assertFalse( "No artifact warnings", reporter.getArtifactWarningIterator().hasNext() );
+ assertFalse( "No artifact successes", reporter.getArtifactSuccessIterator().hasNext() );
+ assertFalse( "No metadata failures", reporter.getRepositoryMetadataFailureIterator().hasNext() );
+ assertFalse( "No metadata warnings", reporter.getRepositoryMetadataWarningIterator().hasNext() );
+ assertFalse( "No metadata successes", reporter.getRepositoryMetadataSuccessIterator().hasNext() );
+ }
+
+ public void testMetadataSingleFailure()
+ {
+ reporter.addFailure( metadata, "Single Failure Reason" );
+ assertEquals( "failures count", 1, reporter.getFailures() );
+ assertEquals( "warnings count", 0, reporter.getWarnings() );
+ assertEquals( "successes count", 0, reporter.getSuccesses() );
+
+ Iterator results = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "must have failures", results.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check failure cause", metadata, result.getMetadata() );
+ assertEquals( "check failure reason", "Single Failure Reason", result.getReason() );
+ assertFalse( "no more failures", results.hasNext() );
+ }
+
+ public void testMetadataMultipleFailures()
+ {
+ reporter.addFailure( metadata, "First Failure Reason" );
+ reporter.addFailure( metadata, "Second Failure Reason" );
+ assertEquals( "failures count", 2, reporter.getFailures() );
+ assertEquals( "warnings count", 0, reporter.getWarnings() );
+ assertEquals( "successes count", 0, reporter.getSuccesses() );
+
+ Iterator results = reporter.getRepositoryMetadataFailureIterator();
+ assertTrue( "must have failures", results.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check failure cause", metadata, result.getMetadata() );
+ assertEquals( "check failure reason", "First Failure Reason", result.getReason() );
+ assertTrue( "must have 2nd failure", results.hasNext() );
+ result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check failure cause", metadata, result.getMetadata() );
+ assertEquals( "check failure reason", "Second Failure Reason", result.getReason() );
+ assertFalse( "no more failures", results.hasNext() );
+ }
+
+ public void testMetadataSingleWarning()
+ {
+ reporter.addWarning( metadata, "Single Warning Message" );
+ assertEquals( "failures count", 0, reporter.getFailures() );
+ assertEquals( "warnings count", 1, reporter.getWarnings() );
+ assertEquals( "successes count", 0, reporter.getSuccesses() );
+
+ Iterator results = reporter.getRepositoryMetadataWarningIterator();
+ assertTrue( "must have failures", results.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check failure cause", metadata, result.getMetadata() );
+ assertEquals( "check failure reason", "Single Warning Message", result.getReason() );
+ assertFalse( "no more failures", results.hasNext() );
+ }
+
+ public void testMetadataMultipleWarnings()
+ {
+ reporter.addWarning( metadata, "First Warning" );
+ reporter.addWarning( metadata, "Second Warning" );
+ assertEquals( "failures count", 0, reporter.getFailures() );
+ assertEquals( "warnings count", 2, reporter.getWarnings() );
+ assertEquals( "successes count", 0, reporter.getSuccesses() );
+
+ Iterator results = reporter.getRepositoryMetadataWarningIterator();
+ assertTrue( "must have warnings", results.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check failure cause", metadata, result.getMetadata() );
+ assertEquals( "check failure reason", "First Warning", result.getReason() );
+ assertTrue( "must have 2nd warning", results.hasNext() );
+ result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check failure cause", metadata, result.getMetadata() );
+ assertEquals( "check failure reason", "Second Warning", result.getReason() );
+ assertFalse( "no more failures", results.hasNext() );
+ }
+
+ public void testMetadataSingleSuccess()
+ {
+ reporter.addSuccess( metadata );
+ assertEquals( "failures count", 0, reporter.getFailures() );
+ assertEquals( "warnings count", 0, reporter.getWarnings() );
+ assertEquals( "successes count", 1, reporter.getSuccesses() );
+
+ Iterator results = reporter.getRepositoryMetadataSuccessIterator();
+ assertTrue( "must have successes", results.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check success metadata", metadata, result.getMetadata() );
+ assertNull( "check no reason", result.getReason() );
+ assertFalse( "no more failures", results.hasNext() );
+ }
+
+ public void testMetadataMultipleSuccesses()
+ {
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-beta-1" );
+ versioning.addVersion( "1.0-beta-2" );
+ RepositoryMetadata metadata2 = new ArtifactRepositoryMetadata( artifact, versioning );
+
+ reporter.addSuccess( metadata );
+ reporter.addSuccess( metadata2 );
+ assertEquals( "failures count", 0, reporter.getFailures() );
+ assertEquals( "warnings count", 0, reporter.getWarnings() );
+ assertEquals( "successes count", 2, reporter.getSuccesses() );
+
+ Iterator results = reporter.getRepositoryMetadataSuccessIterator();
+ assertTrue( "must have successes", results.hasNext() );
+ RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check success metadata", metadata, result.getMetadata() );
+ assertNull( "check no reason", result.getReason() );
+ assertTrue( "must have 2nd success", results.hasNext() );
+ result = (RepositoryMetadataResult) results.next();
+ assertEquals( "check success metadata", metadata2, result.getMetadata() );
+ assertNull( "check no reason", result.getReason() );
+ assertFalse( "no more successes", results.hasNext() );
+ }
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ reporter = new DefaultArtifactReporter();
+ ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+ artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
+
+ Versioning versioning = new Versioning();
+ versioning.addVersion( "1.0-alpha-1" );
+ versioning.addVersion( "1.0-alpha-2" );
+ }
+
+ protected void tearDown()
+ throws Exception
+ {
+ super.tearDown();
+
+ reporter = null;
+ metadata = null;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.record.RepositoryIndexRecordFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.model.Model;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.File;
+import java.util.Collections;
+
+/**
+ * @author Edwin Punzalan
+ */
+public class DuplicateArtifactFileReportProcessorTest
+ extends AbstractRepositoryReportsTestCase
+{
+ private Artifact artifact;
+
+ private Model model;
+
+ private ArtifactReportProcessor processor;
+
+ private ArtifactFactory artifactFactory;
+
+ File indexDirectory;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ indexDirectory = getTestFile( "target/indexDirectory" );
+ FileUtils.deleteDirectory( indexDirectory );
+
+ artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
+ artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "jar" );
+ model = new Model();
+
+ RepositoryArtifactIndexFactory factory =
+ (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
+
+ RepositoryArtifactIndex index = factory.createStandardIndex( indexDirectory );
+
+ RepositoryIndexRecordFactory recordFactory =
+ (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
+
+ index.indexRecords( Collections.singletonList( recordFactory.createRecord( artifact ) ) );
+
+ processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "duplicate" );
+ }
+
+ public void testNullArtifactFile()
+ throws Exception
+ {
+ artifact.setFile( null );
+
+ MockArtifactReporter reporter = new MockArtifactReporter();
+
+ processor.processArtifact( model, artifact, reporter, repository );
+
+ assertEquals( "Check no successes", 0, reporter.getSuccesses() );
+ assertEquals( "Check warnings", 1, reporter.getWarnings() );
+ assertEquals( "Check no failures", 0, reporter.getFailures() );
+ }
+
+ public void testSuccessOnAlreadyIndexedArtifact()
+ throws Exception
+ {
+ MockArtifactReporter reporter = new MockArtifactReporter();
+
+ processor.processArtifact( model, artifact, reporter, repository );
+
+ assertEquals( "Check no successes", 1, reporter.getSuccesses() );
+ assertEquals( "Check warnings", 0, reporter.getWarnings() );
+ assertEquals( "Check no failures", 0, reporter.getFailures() );
+ }
+
+ public void testSuccessOnDifferentGroupId()
+ throws Exception
+ {
+ MockArtifactReporter reporter = new MockArtifactReporter();
+
+ artifact.setGroupId( "different.groupId" );
+ processor.processArtifact( model, artifact, reporter, repository );
+
+ assertEquals( "Check no successes", 1, reporter.getSuccesses() );
+ assertEquals( "Check warnings", 0, reporter.getWarnings() );
+ assertEquals( "Check no failures", 0, reporter.getFailures() );
+ }
+
+ public void testSuccessOnNewArtifact()
+ throws Exception
+ {
+ Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
+
+ MockArtifactReporter reporter = new MockArtifactReporter();
+
+ processor.processArtifact( model, newArtifact, reporter, repository );
+
+ assertEquals( "Check no successes", 1, reporter.getSuccesses() );
+ assertEquals( "Check warnings", 0, reporter.getWarnings() );
+ assertEquals( "Check no failures", 0, reporter.getFailures() );
+ }
+
+ public void testFailure()
+ throws Exception
+ {
+ Artifact duplicate = createArtifact( artifact.getGroupId(), "snapshot-artifact", "1.0-alpha-1-SNAPSHOT",
+ artifact.getVersion(), artifact.getType() );
+ duplicate.setFile( artifact.getFile() );
+
+ MockArtifactReporter reporter = new MockArtifactReporter();
+
+ processor.processArtifact( model, duplicate, reporter, repository );
+
+ assertEquals( "Check no successes", 0, reporter.getSuccesses() );
+ assertEquals( "Check warnings", 0, reporter.getWarnings() );
+ assertEquals( "Check no failures", 1, reporter.getFailures() );
+ }
+
+ private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
+ String type )
+ {
+ Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
+ artifact.setBaseVersion( baseVersion );
+ artifact.setRepository( repository );
+ artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
+ return artifact;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
/**
 * An {@link InvocationHandler} for use with {@link java.lang.reflect.Proxy} in tests:
 * each proxied method replays a pre-configured, ordered list of return values and
 * fails fast once they are exhausted.
 *
 * @author Edwin Punzalan
 */
public class GenericMockObject
    implements InvocationHandler
{
    /** Maps each expected {@link Method} to the ordered {@link List} of values still to be returned. */
    private Map invocations = new HashMap();

    public GenericMockObject()
    {
        //default constructor
    }

    /**
     * @param returnMap initial map of {@link Method} to {@link List} of return values; copied defensively
     */
    public GenericMockObject( Map returnMap )
    {
        invocations = new HashMap( returnMap );
    }

    /**
     * Declare the values that successive invocations of the given method should return.
     *
     * @param method     the method to stub
     * @param returnList values returned (and consumed) in order, one per invocation
     */
    public void setExpectedReturns( Method method, List returnList )
    {
        invocations.put( method, returnList );
    }

    /**
     * Return, and consume, the next expected value for the invoked method.
     *
     * @throws UnsupportedOperationException if the method was never stubbed or its values are exhausted
     */
    public Object invoke( Object proxy, Method method, Object[] args )
    {
        if ( !invocations.containsKey( method ) )
        {
            throw new UnsupportedOperationException( "No expected return values defined." );
        }

        List returnList = (List) invocations.get( method );
        if ( returnList.isEmpty() )
        {
            throw new UnsupportedOperationException( "Too few expected return values defined." );
        }
        return returnList.remove( 0 );
    }
}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.handler.ArtifactHandler;
+import org.apache.maven.artifact.handler.DefaultArtifactHandler;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * This class tests the InvalidPomArtifactReportProcessor class.
+ */
+public class InvalidPomArtifactReportProcessorTest
+ extends AbstractRepositoryReportsTestCase
+{
+ private ArtifactReportProcessor artifactReportProcessor;
+
+ private ArtifactReporter reporter = new MockArtifactReporter();
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+ artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "invalid-pom" );
+ }
+
+ /**
+ * Test the InvalidPomArtifactReportProcessor when the artifact is an invalid pom.
+ */
+ public void testInvalidPomArtifactReportProcessorFailure()
+ throws ReportProcessorException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "pom" );
+ VersionRange version = VersionRange.createFromVersion( "1.0-alpha-3" );
+ Artifact artifact =
+ new DefaultArtifact( "org.apache.maven", "artifactId", version, "compile", "pom", "", handler );
+
+ artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ assertEquals( 1, reporter.getFailures() );
+ }
+
+
+ /**
+ * Test the InvalidPomArtifactReportProcessor when the artifact is a valid pom.
+ */
+ public void testInvalidPomArtifactReportProcessorSuccess()
+ throws ReportProcessorException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "pom" );
+ VersionRange version = VersionRange.createFromVersion( "1.0-alpha-2" );
+ Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "pom", "", handler );
+
+ artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ assertEquals( 1, reporter.getSuccesses() );
+ }
+
+
+ /**
+ * Test the InvalidPomArtifactReportProcessor when the artifact is not a pom.
+ */
+ public void testNotAPomArtifactReportProcessorSuccess()
+ throws ReportProcessorException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "1.0-alpha-1" );
+ Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
+
+ artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
+ assertEquals( 1, reporter.getWarnings() );
+ }
+
+ /**
+ * Test the InvalidPomArtifactReportProcessor when the pom is located in
+ * a remote repository.
+ */
+ /* public void testRemotePomArtifactReportProcessorSuccess(){
+ try{
+ ArtifactHandler handler = new DefaultArtifactHandler( "pom" );
+ VersionRange version = VersionRange.createFromVersion( remoteArtifactVersion );
+ Artifact artifact = new DefaultArtifact( remoteArtifactGroup, remoteArtifactId, version, remoteArtifactScope,
+ "pom", "", handler );
+ ArtifactRepository repository = new DefaultArtifactRepository( remoteRepoId, remoteRepoUrl,
+ new DefaultRepositoryLayout() );
+
+ artifactReportProcessor.processArtifact(null, artifact, reporter, repository);
+ if(reporter.getSuccesses() == 1)
+ assertTrue(reporter.getSuccesses() == 1);
+
+ }catch(Exception e){
+
+ }
+ }
+ */
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.DefaultArtifact;
+import org.apache.maven.artifact.handler.ArtifactHandler;
+import org.apache.maven.artifact.handler.DefaultArtifactHandler;
+import org.apache.maven.artifact.versioning.VersionRange;
+import org.apache.maven.model.Model;
+import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+
+/**
+ * This class tests the LocationArtifactReportProcessor.
+ */
+public class LocationArtifactReportProcessorTest
+ extends AbstractRepositoryReportsTestCase
+{
+ private ArtifactReportProcessor artifactReportProcessor;
+
+ private ArtifactReporter reporter = new MockArtifactReporter();
+
+ private MavenXpp3Reader pomReader;
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+ artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "artifact-location" );
+ pomReader = new MavenXpp3Reader();
+ }
+
+ public void tearDown()
+ throws Exception
+ {
+ super.tearDown();
+ artifactReportProcessor = null;
+ pomReader = null;
+ }
+
+ /**
+ * Test the LocationArtifactReporter when the artifact's physical location matches the location specified
+ * both in the file system pom and in the pom included in the package.
+ */
+ public void testPackagedPomLocationArtifactReporterSuccess()
+ throws ReportProcessorException, IOException, XmlPullParserException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "2.0" );
+ Artifact artifact =
+ new DefaultArtifact( "org.apache.maven", "maven-model", version, "compile", "jar", "", handler );
+
+ String path = "org/apache/maven/maven-model/2.0/maven-model-2.0.pom";
+ Model model = readPom( path );
+
+ artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ assertEquals( 1, reporter.getSuccesses() );
+ }
+
+ /**
+ * Test the LocationArtifactReporter when the artifact is in the location specified in the
+ * file system pom (but the jar file does not have a pom included in its package).
+ */
+ public void testLocationArtifactReporterSuccess()
+ throws ReportProcessorException, IOException, XmlPullParserException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "1.0-alpha-1" );
+ Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
+
+ String path = "groupId/artifactId/1.0-alpha-1/artifactId-1.0-alpha-1.pom";
+ Model model = readPom( path );
+
+ artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ assertEquals( 1, reporter.getSuccesses() );
+ }
+
+ /**
+ * Test the LocationArtifactReporter when the artifact is not in the location specified
+ * in the file system pom.
+ */
+ public void testLocationArtifactReporterFailure()
+ throws IOException, XmlPullParserException, ReportProcessorException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "1.0-alpha-2" );
+ Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
+
+ String path = "groupId/artifactId/1.0-alpha-2/artifactId-1.0-alpha-2.pom";
+ Model model = readPom( path );
+
+ artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ assertEquals( 1, reporter.getFailures() );
+ }
+
+ /**
+ * Test the LocationArtifactReporter when the artifact's physical location does not match the
+ * location in the file system pom but instead matches the specified location in the packaged pom.
+ */
+ public void testFsPomArtifactMatchFailure()
+ throws IOException, ReportProcessorException, XmlPullParserException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "2.0" );
+ Artifact artifact =
+ new DefaultArtifact( "org.apache.maven", "maven-archiver", version, "compile", "jar", "", handler );
+
+ String path = "org/apache/maven/maven-archiver/2.0/maven-archiver-2.0.pom";
+ Model model = readPom( path );
+
+ artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ assertEquals( 1, reporter.getFailures() );
+ }
+
+ private Model readPom( String path )
+ throws IOException, XmlPullParserException
+ {
+ Reader reader = new FileReader( new File( repository.getBasedir(), path ) );
+ Model model = pomReader.read( reader );
+ // hokey inheritence to avoid some errors right now
+ if ( model.getGroupId() == null )
+ {
+ model.setGroupId( model.getParent().getGroupId() );
+ }
+ if ( model.getVersion() == null )
+ {
+ model.setVersion( model.getParent().getVersion() );
+ }
+ return model;
+ }
+
+ /**
+ * Test the LocationArtifactReporter when the artifact's physical location does not match the
+ * location specified in the packaged pom but matches the location specified in the file system pom.
+ */
+ public void testPkgPomArtifactMatchFailure()
+ throws IOException, XmlPullParserException, ReportProcessorException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "2.1" );
+ Artifact artifact =
+ new DefaultArtifact( "org.apache.maven", "maven-monitor", version, "compile", "jar", "", handler );
+
+ String path = "org/apache/maven/maven-monitor/2.1/maven-monitor-2.1.pom";
+ Model model = readPom( path );
+
+ artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ assertEquals( 1, reporter.getFailures() );
+ }
+
+ /**
+ * Test the LocationArtifactReporter when the artifact's physical location does not match both the
+ * location specified in the packaged pom and the location specified in the file system pom.
+ */
+ public void testBothPomArtifactMatchFailure()
+ throws IOException, XmlPullParserException, ReportProcessorException
+ {
+ ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
+ VersionRange version = VersionRange.createFromVersion( "2.1" );
+ Artifact artifact =
+ new DefaultArtifact( "org.apache.maven", "maven-project", version, "compile", "jar", "", handler );
+
+ String path = "org/apache/maven/maven-project/2.1/maven-project-2.1.pom";
+ Model model = readPom( path );
+
+ artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ assertEquals( 1, reporter.getFailures() );
+ }
+
+ /**
+ * Test the LocationArtifactReportProcessor when the artifact is located in the remote repository.
+ */
+ /* public void testRemoteArtifactReportProcessorFailure()
+ {
+
+ ArtifactHandler handler = new DefaultArtifactHandler( remoteArtifactType );
+ VersionRange version = VersionRange.createFromVersion( remoteArtifactVersion );
+ Artifact artifact = new DefaultArtifact( remoteArtifactGroup, remoteArtifactId, version, remoteArtifactScope,
+ remoteArtifactType, "", handler );
+ ArtifactRepository repository = new DefaultArtifactRepository( remoteRepoId, remoteRepoUrl,
+ new DefaultRepositoryLayout() );
+ try
+ {
+ URL url = new URL( remoteRepoUrl + remoteArtifactGroup + "/" + remoteArtifactId + "/"
+ + remoteArtifactVersion + "/" + remoteArtifactId + "-" + remoteArtifactVersion + ".pom" );
+ InputStream is = url.openStream();
+ Reader reader = new InputStreamReader( is );
+ Model model = pomReader.read( reader );
+
+ artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
+ if ( reporter.getFailures() > 0 )
+ assertTrue( reporter.getFailures() == 1 );
+
+ if ( reporter.getSuccesses() > 0 )
+ assertTrue( reporter.getSuccesses() == 1 );
+
+ }
+ catch ( Exception e )
+ {
+ e.printStackTrace();
+ }
+ }
+ */
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.handler.ArtifactHandler;
+import org.apache.maven.artifact.metadata.ArtifactMetadata;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
+import org.apache.maven.artifact.versioning.ArtifactVersion;
+import org.apache.maven.artifact.versioning.OverConstrainedVersionException;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+import java.io.File;
+import java.util.Collection;
+import java.util.List;
+
/**
 * Minimal stub implementation of {@link Artifact} for tests.
 * <p/>
 * Only groupId, artifactId and version carry real state; every other getter
 * returns null/false/0 and every other mutator is a no-op.
 *
 * @noinspection ReturnOfNull
 */
public class MockArtifact
    implements Artifact
{
    // the only state this mock actually tracks
    private String groupId;

    private String artifactId;

    private String version;

    public String getGroupId()
    {
        return groupId;
    }

    public String getArtifactId()
    {
        return artifactId;
    }

    public String getVersion()
    {
        return version;
    }

    public void setVersion( String s )
    {
        version = s;
    }

    // --- everything below is stubbed: getters return null/false, setters do nothing ---

    public String getScope()
    {
        return null;
    }

    public String getType()
    {
        return null;
    }

    public String getClassifier()
    {
        return null;
    }

    public boolean hasClassifier()
    {
        return false;
    }

    public File getFile()
    {
        return null;
    }

    public void setFile( File file )
    {
    }

    public String getBaseVersion()
    {
        return null;
    }

    public void setBaseVersion( String s )
    {
    }

    public String getId()
    {
        return null;
    }

    public String getDependencyConflictId()
    {
        return null;
    }

    public void addMetadata( ArtifactMetadata artifactMetadata )
    {
    }

    public Collection getMetadataList()
    {
        return null;
    }

    public void setRepository( ArtifactRepository artifactRepository )
    {
    }

    public ArtifactRepository getRepository()
    {
        return null;
    }

    public void updateVersion( String s, ArtifactRepository artifactRepository )
    {
    }

    public String getDownloadUrl()
    {
        return null;
    }

    public void setDownloadUrl( String s )
    {
    }

    public ArtifactFilter getDependencyFilter()
    {
        return null;
    }

    public void setDependencyFilter( ArtifactFilter artifactFilter )
    {
    }

    public ArtifactHandler getArtifactHandler()
    {
        return null;
    }

    public List getDependencyTrail()
    {
        return null;
    }

    public void setDependencyTrail( List list )
    {
    }

    public void setScope( String s )
    {
    }

    public VersionRange getVersionRange()
    {
        return null;
    }

    public void setVersionRange( VersionRange versionRange )
    {
    }

    public void selectVersion( String s )
    {
    }

    public void setGroupId( String s )
    {
        groupId = s;
    }

    public void setArtifactId( String s )
    {
        artifactId = s;
    }

    public boolean isSnapshot()
    {
        return false;
    }

    public void setResolved( boolean b )
    {
    }

    public boolean isResolved()
    {
        return false;
    }

    public void setResolvedVersion( String s )
    {
    }

    public void setArtifactHandler( ArtifactHandler artifactHandler )
    {
    }

    public boolean isRelease()
    {
        return false;
    }

    public void setRelease( boolean b )
    {
    }

    public List getAvailableVersions()
    {
        return null;
    }

    public void setAvailableVersions( List list )
    {
    }

    public boolean isOptional()
    {
        return false;
    }

    public ArtifactVersion getSelectedVersion()
        throws OverConstrainedVersionException
    {
        return null;
    }

    public boolean isSelectedVersionKnown()
        throws OverConstrainedVersionException
    {
        return false;
    }

    // NOTE: every instance compares as equal; fine for the current tests, but do not
    // rely on ordering or use this mock as a sorted-collection element
    public int compareTo( Object o )
    {
        return 0;
    }

    public void setOptional( boolean b )
    {
    }
}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.versioning.VersionRange;
+
+/**
+ * Mock {@link ArtifactFactory} for unit tests. Every factory method
+ * intentionally returns <code>null</code>: the tests that inject this
+ * mock only need the factory calls to succeed, never the artifacts
+ * themselves.
+ *
+ * @noinspection ReturnOfNull
+ */
+public class MockArtifactFactory
+ implements ArtifactFactory
+{
+ public Artifact createArtifact( String s, String s1, String s2, String s3, String s4 )
+ {
+ return null;
+ }
+
+ public Artifact createArtifactWithClassifier( String s, String s1, String s2, String s3, String s4 )
+ {
+ return null;
+ }
+
+ public Artifact createDependencyArtifact( String s, String s1, VersionRange versionRange, String s2, String s3,
+ String s4 )
+ {
+ return null;
+ }
+
+ public Artifact createDependencyArtifact( String s, String s1, VersionRange versionRange, String s2, String s3,
+ String s4, String s5 )
+ {
+ return null;
+ }
+
+ public Artifact createDependencyArtifact( String s, String s1, VersionRange versionRange, String s2, String s3,
+ String s4, String s5, boolean b )
+ {
+ return null;
+ }
+
+ public Artifact createBuildArtifact( String s, String s1, String s2, String s3 )
+ {
+ return null;
+ }
+
+ public Artifact createProjectArtifact( String s, String s1, String s2 )
+ {
+ return null;
+ }
+
+ public Artifact createParentArtifact( String s, String s1, String s2 )
+ {
+ return null;
+ }
+
+ public Artifact createPluginArtifact( String s, String s1, VersionRange versionRange )
+ {
+ return null;
+ }
+
+ public Artifact createProjectArtifact( String s, String s1, String s2, String s3 )
+ {
+ return null;
+ }
+
+ public Artifact createExtensionArtifact( String s, String s1, VersionRange versionRange )
+ {
+ return null;
+ }
+
+ public Artifact createDependencyArtifact( String string, String string1, VersionRange versionRange, String string2,
+ String string3, String string4, boolean b )
+ {
+ return null;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Mock {@link ArtifactReportProcessor} that replays a scripted list of
+ * {@link ReportCondition}s into the supplied reporter each time an
+ * artifact is processed. The model/artifact/repository arguments are
+ * ignored; only the scripted conditions determine the reported results.
+ */
+public class MockArtifactReportProcessor
+ implements ArtifactReportProcessor
+{
+ // Scripted results, added via addReturnValue() in the order they
+ // should be reported.
+ private List reportConditions;
+
+ // Cursor over reportConditions; lazily (re)created so the script
+ // wraps around once exhausted.
+ private Iterator iterator;
+
+ public MockArtifactReportProcessor()
+ {
+ reportConditions = new ArrayList();
+ }
+
+ // Drains the remaining scripted conditions into the reporter as
+ // success/warning/failure entries. Note: unlike
+ // MockRepositoryQueryLayer (one value per call), a single call here
+ // consumes ALL remaining conditions.
+ public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
+ ArtifactRepository repository )
+ {
+ if ( iterator == null || !iterator.hasNext() ) // not initialized or reached end of the list. start again
+ {
+ iterator = reportConditions.iterator();
+ }
+ if ( !reportConditions.isEmpty() )
+ {
+ while ( iterator.hasNext() )
+ {
+ ReportCondition reportCondition = (ReportCondition) iterator.next();
+ int i = reportCondition.getResult();
+ if ( i == ReportCondition.SUCCESS )
+ {
+ reporter.addSuccess( reportCondition.getArtifact() );
+ }
+ else if ( i == ReportCondition.WARNING )
+ {
+ reporter.addWarning( reportCondition.getArtifact(), reportCondition.getReason() );
+ }
+ else if ( i == ReportCondition.FAILURE )
+ {
+ reporter.addFailure( reportCondition.getArtifact(), reportCondition.getReason() );
+ }
+ }
+ }
+ }
+
+ // Appends one scripted result (SUCCESS/WARNING/FAILURE constants from
+ // ReportCondition) to the replay list.
+ public void addReturnValue( int result, Artifact artifact, String reason )
+ {
+ reportConditions.add( new ReportCondition( result, artifact, reason ) );
+ }
+
+ // Clears the script. NOTE(review): the stale iterator is not reset
+ // here; it is recreated on the next processArtifact() call.
+ public void clearList()
+ {
+ reportConditions.clear();
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Mock implementation of the artifact reporter. Records every
+ * success/warning/failure it receives, for artifacts and repository
+ * metadata separately, and exposes them via iterators and counters.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @version $Id$
+ */
+public class MockArtifactReporter
+ implements ArtifactReporter
+{
+ private List artifactFailures = new ArrayList();
+
+ private List artifactSuccesses = new ArrayList();
+
+ private List artifactWarnings = new ArrayList();
+
+ private List metadataFailures = new ArrayList();
+
+ private List metadataSuccesses = new ArrayList();
+
+ private List metadataWarnings = new ArrayList();
+
+ public void addFailure( Artifact artifact, String reason )
+ {
+ artifactFailures.add( new ArtifactResult( artifact, reason ) );
+ }
+
+ public void addSuccess( Artifact artifact )
+ {
+ artifactSuccesses.add( new ArtifactResult( artifact ) );
+ }
+
+ public void addWarning( Artifact artifact, String message )
+ {
+ artifactWarnings.add( new ArtifactResult( artifact, message ) );
+ }
+
+ public void addFailure( RepositoryMetadata metadata, String reason )
+ {
+ metadataFailures.add( new RepositoryMetadataResult( metadata, reason ) );
+ }
+
+ public void addSuccess( RepositoryMetadata metadata )
+ {
+ metadataSuccesses.add( new RepositoryMetadataResult( metadata ) );
+ }
+
+ public void addWarning( RepositoryMetadata metadata, String message )
+ {
+ metadataWarnings.add( new RepositoryMetadataResult( metadata, message ) );
+ }
+
+ public Iterator getArtifactFailureIterator()
+ {
+ return artifactFailures.iterator();
+ }
+
+ public Iterator getArtifactSuccessIterator()
+ {
+ return artifactSuccesses.iterator();
+ }
+
+ public Iterator getArtifactWarningIterator()
+ {
+ return artifactWarnings.iterator();
+ }
+
+ public Iterator getRepositoryMetadataFailureIterator()
+ {
+ return metadataFailures.iterator();
+ }
+
+ public Iterator getRepositoryMetadataSuccessIterator()
+ {
+ return metadataSuccesses.iterator();
+ }
+
+ public Iterator getRepositoryMetadataWarningIterator()
+ {
+ return metadataWarnings.iterator();
+ }
+
+ // NOTE: the three counters below cover ARTIFACT results only; metadata
+ // results are reachable solely through the metadata iterators above.
+ public int getFailures()
+ {
+ return artifactFailures.size();
+ }
+
+ public int getSuccesses()
+ {
+ return artifactSuccesses.size();
+ }
+
+ public int getWarnings()
+ {
+ return artifactWarnings.size();
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.repository.metadata.Snapshot;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Mock {@link RepositoryQueryLayer} that answers containsArtifact()
+ * from a scripted list of boolean return values, one value per call,
+ * wrapping around when the script is exhausted. The artifact argument
+ * is ignored. getVersions() always answers an empty list.
+ */
+public class MockRepositoryQueryLayer
+ implements RepositoryQueryLayer
+{
+ // Scripted Boolean answers, consumed in insertion order.
+ private List queryConditions;
+
+ // Cursor over queryConditions; lazily (re)created to wrap around.
+ private Iterator iterator;
+
+ public MockRepositoryQueryLayer()
+ {
+ queryConditions = new ArrayList();
+ }
+
+ // Returns the next scripted answer, or false when no answers have
+ // been scripted at all.
+ public boolean containsArtifact( Artifact artifact )
+ {
+ if ( iterator == null || !iterator.hasNext() ) // not initialized or reached end of the list. start again
+ {
+ iterator = queryConditions.iterator();
+ }
+ boolean b;
+ if ( queryConditions.isEmpty() )
+ {
+ b = false;
+ }
+ else
+ {
+ b = ( (Boolean) iterator.next() ).booleanValue();
+ }
+ return b;
+ }
+
+ // Appends one scripted answer for a future containsArtifact() call.
+ public void addReturnValue( boolean queryCondition )
+ {
+ queryConditions.add( Boolean.valueOf( queryCondition ) );
+ }
+
+ public void clearList()
+ {
+ queryConditions.clear();
+ }
+
+ // The snapshot argument is ignored; delegates to the scripted answer.
+ public boolean containsArtifact( Artifact artifact, Snapshot snapshot )
+ {
+ return containsArtifact( artifact );
+ }
+
+ public List getVersions( Artifact artifact )
+ {
+ return Collections.EMPTY_LIST;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.artifact.Artifact;
+
+/**
+ * Mutable value holder pairing an expected report outcome
+ * (SUCCESS / FAILURE / WARNING) with the artifact it applies to and a
+ * human-readable reason. Used by MockArtifactReportProcessor to script
+ * reporter output.
+ */
+public class ReportCondition
+{
+ public static final int SUCCESS = 0;
+
+ public static final int FAILURE = -1;
+
+ public static final int WARNING = 1;
+
+ // One of SUCCESS, FAILURE or WARNING.
+ private int result;
+
+ private Artifact artifact;
+
+ // Explanation attached to WARNING/FAILURE results; unused for SUCCESS.
+ private String reason;
+
+ public ReportCondition( int result, Artifact artifact, String reason )
+ {
+ this.result = result;
+ this.artifact = artifact;
+ this.reason = reason;
+ }
+
+ public int getResult()
+ {
+ return result;
+ }
+
+ public void setResult( int result )
+ {
+ this.result = result;
+ }
+
+ public Artifact getArtifact()
+ {
+ return artifact;
+ }
+
+ public void setArtifact( Artifact artifact )
+ {
+ this.artifact = artifact;
+ }
+
+ public String getReason()
+ {
+ return reason;
+ }
+
+ public void setReason( String reason )
+ {
+ this.reason = reason;
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.digest.Digester;
-import org.apache.maven.repository.digest.DigesterException;
-import org.codehaus.plexus.util.FileUtils;
-import org.codehaus.plexus.util.IOUtil;
-
-import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.util.jar.JarEntry;
-import java.util.jar.JarOutputStream;
-
-/**
- * This class creates the artifact and metadata files used for testing the ChecksumArtifactReporter.
- * It is extended by ChecksumArtifactReporterTest class.
- */
-public abstract class AbstractChecksumArtifactReporterTestCase
- extends AbstractRepositoryReportsTestCase
-{
- private static final String[] validArtifactChecksumJars = {"validArtifact-1.0"};
-
- private static final String[] invalidArtifactChecksumJars = {"invalidArtifact-1.0"};
-
- private static final String metadataChecksumFilename = "maven-metadata";
-
- private Digester sha1Digest;
-
- private Digester md5Digest;
-
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- sha1Digest = (Digester) lookup( Digester.ROLE, "sha1" );
- md5Digest = (Digester) lookup( Digester.ROLE, "md5" );
- }
-
- /**
- * Create checksum files.
- *
- * @param type The type of checksum file to be created.
- */
- protected void createChecksumFile( String type )
- throws DigesterException, IOException
- {
- //loop through the valid artifact names..
- if ( "VALID".equals( type ) )
- {
- for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
- {
- writeChecksumFile( "checksumTest/", validArtifactChecksumJars[i], "jar", true );
- }
- }
- else if ( "INVALID".equals( type ) )
- {
- for ( int i = 0; i < invalidArtifactChecksumJars.length; i++ )
- {
- writeChecksumFile( "checksumTest/", invalidArtifactChecksumJars[i], "jar", false );
- }
- }
- }
-
- /**
- * Create checksum files for metadata.
- *
- * @param type The type of checksum to be created. (Valid or invalid)
- */
- protected void createMetadataFile( String type )
- throws DigesterException, IOException
- {
- //loop through the valid artifact names..
- if ( "VALID".equals( type ) )
- {
- writeMetadataFile( "checksumTest/validArtifact/1.0/", metadataChecksumFilename, "xml", true );
- writeMetadataFile( "checksumTest/validArtifact/", metadataChecksumFilename, "xml", true );
- writeMetadataFile( "checksumTest/", metadataChecksumFilename, "xml", true );
- }
- else if ( "INVALID".equals( type ) )
- {
- writeMetadataFile( "checksumTest/invalidArtifact/1.0/", metadataChecksumFilename, "xml", false );
- }
- }
-
- /**
- * Create artifact together with its checksums.
- *
- * @param relativePath The groupId
- * @param filename The filename of the artifact to be created.
- * @param type The file type (JAR)
- * @param isValid Indicates whether the checksum to be created is valid or not.
- */
- private void writeChecksumFile( String relativePath, String filename, String type, boolean isValid )
- throws IOException, DigesterException
- {
- //Initialize variables for creating jar files
- String repoUrl = repository.getBasedir();
-
- String dirs = filename.replace( '-', '/' );
- //create the group level directory of the artifact
- File dirFiles = new File( repoUrl + relativePath + dirs );
-
- if ( dirFiles.mkdirs() )
- {
- // create a jar file
- String path = repoUrl + relativePath + dirs + "/" + filename + "." + type;
- FileOutputStream f = new FileOutputStream( path );
- JarOutputStream out = new JarOutputStream( new BufferedOutputStream( f ) );
-
- // jar sample.txt
- String filename1 = repoUrl + relativePath + dirs + "/sample.txt";
- createSampleFile( filename1 );
-
- BufferedReader in = new BufferedReader( new FileReader( filename1 ) );
- out.putNextEntry( new JarEntry( filename1 ) );
- IOUtil.copy( in, out );
- in.close();
- out.close();
-
- //Create md5 and sha-1 checksum files..
-
- File file = new File( path + ".md5" );
- OutputStream os = new FileOutputStream( file );
- OutputStreamWriter osw = new OutputStreamWriter( os );
- String sum = md5Digest.calc( new File( path ) );
- if ( !isValid )
- {
- osw.write( sum + "1" );
- }
- else
- {
- osw.write( sum );
- }
- osw.close();
-
- file = new File( path + ".sha1" );
- os = new FileOutputStream( file );
- osw = new OutputStreamWriter( os );
- String sha1sum = sha1Digest.calc( new File( path ) );
- if ( !isValid )
- {
- osw.write( sha1sum + "2" );
- }
- else
- {
- osw.write( sha1sum );
- }
- osw.close();
- }
- }
-
- /**
- * Create metadata file together with its checksums.
- *
- * @param relativePath The groupId
- * @param filename The filename of the artifact to be created.
- * @param type The file type (JAR)
- * @param isValid Indicates whether the checksum to be created is valid or not.
- */
- private void writeMetadataFile( String relativePath, String filename, String type, boolean isValid )
- throws IOException, DigesterException
- {
- //create checksum for the metadata file..
- String repoUrl = repository.getBasedir();
- String url = repository.getBasedir() + "/" + filename + "." + type;
-
- String path = repoUrl + relativePath + filename + "." + type;
- FileUtils.copyFile( new File( url ), new File( path ) );
-
- //Create md5 and sha-1 checksum files..
- File file = new File( path + ".md5" );
- OutputStream os = new FileOutputStream( file );
- OutputStreamWriter osw = new OutputStreamWriter( os );
- String md5sum = md5Digest.calc( new File( path ) );
- if ( !isValid )
- {
- osw.write( md5sum + "1" );
- }
- else
- {
- osw.write( md5sum );
- }
- osw.close();
-
- file = new File( path + ".sha1" );
- os = new FileOutputStream( file );
- osw = new OutputStreamWriter( os );
- String sha1sum = sha1Digest.calc( new File( path ) );
- if ( !isValid )
- {
- osw.write( sha1sum + "2" );
- }
- else
- {
- osw.write( sha1sum );
- }
- osw.close();
- }
-
- /**
- * Create the sample file that will be included in the jar.
- *
- * @param filename
- */
- private void createSampleFile( String filename )
- throws IOException
- {
- File file = new File( filename );
- OutputStream os = new FileOutputStream( file );
- OutputStreamWriter osw = new OutputStreamWriter( os );
- osw.write( "This is the content of the sample file that will be included in the jar file." );
- osw.close();
- }
-
- /**
- * Delete the test directory created in the repository.
- *
- * @param dir The directory to be deleted.
- */
- protected void deleteTestDirectory( File dir )
- {
- try
- {
- FileUtils.deleteDirectory( dir );
- }
- catch ( IOException e )
- {
- // ignore
- }
- }
-
- private void deleteFile( String filename )
- {
- File f = new File( filename );
- f.delete();
- }
-
- protected void deleteChecksumFiles( String type )
- {
- //delete valid checksum files of artifacts created
- for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
- {
- deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
- "/" + validArtifactChecksumJars[i] + "." + type + ".md5" );
-
- deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
- "/" + validArtifactChecksumJars[i] + "." + type + ".sha1" );
- }
-
- //delete valid checksum files of metadata file
- for ( int i = 0; i < validArtifactChecksumJars.length; i++ )
- {
- deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
- "/" + metadataChecksumFilename + ".xml.md5" );
-
- deleteFile( repository.getBasedir() + "checksumTest/" + validArtifactChecksumJars[i].replace( '-', '/' ) +
- "/" + metadataChecksumFilename + ".xml.sha1" );
- }
- }
-
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.util.List;
-
-/**
- *
- */
-public abstract class AbstractRepositoryQueryLayerTestCase
- extends PlexusTestCase
-{
- private ArtifactFactory artifactFactory;
-
- protected ArtifactRepository repository;
-
- protected CachedRepositoryQueryLayer queryLayer;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
- File repositoryDirectory = getTestFile( "src/test/repository" );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- repository =
- factory.createArtifactRepository( "test", repositoryDirectory.toURL().toString(), layout, null, null );
- }
-
- public void testContainsArtifactTrue()
- {
- Artifact artifact = getArtifact( "groupId", "artifactId", "1.0-alpha-1" );
-
- assertTrue( "check artifact", queryLayer.containsArtifact( artifact ) );
- }
-
- public void testContainsArtifactFalse()
- {
- Artifact artifact = getArtifact( "groupId", "artifactId", "1.0-beta-1" );
-
- assertFalse( "check non-existent artifact", queryLayer.containsArtifact( artifact ) );
- }
-
- public void testContainsSnapshotArtifactTrue()
- {
- Snapshot snapshot = new Snapshot();
- snapshot.setTimestamp( "20050611.202024" );
- snapshot.setBuildNumber( 1 );
-
- Artifact artifact = getArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT" );
- assertTrue( "check for snapshot artifact", queryLayer.containsArtifact( artifact, snapshot ) );
- }
-
- public void testContainsSnapshotArtifactFalse()
- {
- Snapshot snapshot = new Snapshot();
- snapshot.setTimestamp( "20050611.202024" );
- snapshot.setBuildNumber( 2 );
-
- Artifact artifact = getArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT" );
- assertFalse( "check for non-existent snapshot artifact", queryLayer.containsArtifact( artifact, snapshot ) );
- }
-
- public void testArtifactVersionsTrue()
- throws Exception
- {
- Artifact artifact = getArtifact( "groupId", "artifactId", "ignored" );
-
- List versions = queryLayer.getVersions( artifact );
-
- assertTrue( "check version 1.0-alpha-1", versions.contains( "1.0-alpha-1" ) );
- assertTrue( "check version 1.0-alpha-2", versions.contains( "1.0-alpha-2" ) );
- assertFalse( "check version 1.0-alpha-3", versions.contains( "1.0-alpha-3" ) );
- }
-
- public void testArtifactVersionsFalse()
- throws Exception
- {
- Artifact artifact = getArtifact( "groupId", "artifactId", "ignored" );
-
- List versions = queryLayer.getVersions( artifact );
-
- assertTrue( "check version 1.0-alpha-1", versions.contains( "1.0-alpha-1" ) );
- assertTrue( "check version 1.0-alpha-2", versions.contains( "1.0-alpha-2" ) );
- assertFalse( "check version 1.0-alpha-3", versions.contains( "1.0-alpha-3" ) );
- }
-
- public void testArtifactVersionsError()
- {
- Artifact artifact = getArtifact( "groupId", "none", "ignored" );
-
- try
- {
- queryLayer.getVersions( artifact );
- fail( "expected error not thrown" );
- }
- catch ( RepositoryQueryLayerException e )
- {
- //expected
- }
- }
-
- private Artifact getArtifact( String groupId, String artifactId, String version )
- {
- return artifactFactory.createBuildArtifact( groupId, artifactId, version, "pom" );
- }
-
- protected void tearDown()
- throws Exception
- {
- release( artifactFactory );
- super.tearDown();
- artifactFactory = null;
- repository = null;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
-import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-
-/**
- *
- */
-public abstract class AbstractRepositoryReportsTestCase
- extends PlexusTestCase
-{
- /**
- * This should only be used for the few that can't use the query layer.
- */
- protected ArtifactRepository repository;
-
- protected static final String remoteRepoUrl = "http://public.planetmirror.com/pub/maven2/";
-
- protected static final String remoteArtifactGroup = "HTTPClient";
-
- protected static final String remoteArtifactId = "HTTPClient";
-
- protected static final String remoteArtifactVersion = "0.3-3";
-
- protected static final String remoteArtifactScope = "compile";
-
- protected static final String remoteArtifactType = "jar";
-
- protected static final String remoteRepoId = "remote-repo";
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
- File repositoryDirectory = getTestFile( "src/test/repository" );
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- repository = factory.createArtifactRepository( "repository", repositoryDirectory.toURL().toString(), layout,
- null, null );
- }
-
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.model.Dependency;
-import org.apache.maven.model.Model;
-
-import java.util.Iterator;
-
-/**
- *
- */
-public class ArtifactReportProcessorTest
- extends AbstractRepositoryReportsTestCase
-{
- private static final String EMPTY_STRING = "";
-
- private static final String VALID = "temp";
-
- private MockArtifactReporter reporter;
-
- private Artifact artifact;
-
- private Model model;
-
- private DefaultArtifactReportProcessor processor;
-
- private static final boolean ARTIFACT_FOUND = true;
-
- private static final boolean ARTIFACT_NOT_FOUND = false;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
- reporter = new MockArtifactReporter();
- artifact = new MockArtifact();
- model = new Model();
- processor = new DefaultArtifactReportProcessor();
- }
-
- public void testNullArtifact()
- {
- processor.processArtifact( model, null, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.NULL_ARTIFACT, result.getReason() );
- }
-
- public void testNoProjectDescriptor()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
- setRequiredElements( artifact, VALID, VALID, VALID );
- processor.processArtifact( null, artifact, reporter, null );
- assertEquals( 1, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.NULL_MODEL, result.getReason() );
- }
-
- public void testArtifactFoundButNoDirectDependencies()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
- setRequiredElements( artifact, VALID, VALID, VALID );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 1, reporter.getSuccesses() );
- assertEquals( 0, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- }
-
- public void testArtifactNotFound()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_NOT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
- setRequiredElements( artifact, VALID, VALID, VALID );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.ARTIFACT_NOT_FOUND, result.getReason() );
- }
-
- public void testValidArtifactWithNullDependency()
- {
- MockArtifactFactory artifactFactory = new MockArtifactFactory();
- processor.setArtifactFactory( artifactFactory );
-
- setRequiredElements( artifact, VALID, VALID, VALID );
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- Dependency dependency = new Dependency();
- setRequiredElements( dependency, VALID, VALID, VALID );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- processor.setRepositoryQueryLayer( queryLayer );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 2, reporter.getSuccesses() );
- assertEquals( 0, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- }
-
- public void testValidArtifactWithValidSingleDependency()
- {
- MockArtifactFactory artifactFactory = new MockArtifactFactory();
- processor.setArtifactFactory( artifactFactory );
-
- setRequiredElements( artifact, VALID, VALID, VALID );
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- Dependency dependency = new Dependency();
- setRequiredElements( dependency, VALID, VALID, VALID );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- processor.setRepositoryQueryLayer( queryLayer );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 2, reporter.getSuccesses() );
- assertEquals( 0, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- }
-
- public void testValidArtifactWithValidMultipleDependencies()
- {
- MockArtifactFactory artifactFactory = new MockArtifactFactory();
- processor.setArtifactFactory( artifactFactory );
-
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- Dependency dependency = new Dependency();
- setRequiredElements( dependency, VALID, VALID, VALID );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- setRequiredElements( artifact, VALID, VALID, VALID );
- processor.setRepositoryQueryLayer( queryLayer );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 6, reporter.getSuccesses() );
- assertEquals( 0, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- }
-
- public void testValidArtifactWithAnInvalidDependency()
- {
- MockArtifactFactory artifactFactory = new MockArtifactFactory();
- processor.setArtifactFactory( artifactFactory );
-
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- Dependency dependency = new Dependency();
- setRequiredElements( dependency, VALID, VALID, VALID );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_NOT_FOUND );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- setRequiredElements( artifact, VALID, VALID, VALID );
- processor.setRepositoryQueryLayer( queryLayer );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 5, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.DEPENDENCY_NOT_FOUND, result.getReason() );
- }
-
- public void testEmptyGroupId()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
-
- setRequiredElements( artifact, EMPTY_STRING, VALID, VALID );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_GROUP_ID, result.getReason() );
- }
-
- public void testEmptyArtifactId()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
-
- setRequiredElements( artifact, VALID, EMPTY_STRING, VALID );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_ARTIFACT_ID, result.getReason() );
- }
-
- public void testEmptyVersion()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
-
- setRequiredElements( artifact, VALID, VALID, EMPTY_STRING );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_VERSION, result.getReason() );
- }
-
- public void testNullGroupId()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
-
- setRequiredElements( artifact, null, VALID, VALID );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_GROUP_ID, result.getReason() );
- }
-
- public void testNullArtifactId()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
-
- setRequiredElements( artifact, VALID, null, VALID );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_ARTIFACT_ID, result.getReason() );
- }
-
- public void testNullVersion()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
-
- setRequiredElements( artifact, VALID, VALID, null );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_VERSION, result.getReason() );
- }
-
- public void testMultipleFailures()
- {
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
- processor.setRepositoryQueryLayer( queryLayer );
-
- setRequiredElements( artifact, null, null, null );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 3, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_GROUP_ID, result.getReason() );
- result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_ARTIFACT_ID, result.getReason() );
- result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_VERSION, result.getReason() );
- }
-
- public void testValidArtifactWithInvalidDependencyGroupId()
- {
- MockArtifactFactory artifactFactory = new MockArtifactFactory();
- processor.setArtifactFactory( artifactFactory );
-
- setRequiredElements( artifact, VALID, VALID, VALID );
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- Dependency dependency = new Dependency();
- setRequiredElements( dependency, null, VALID, VALID );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- processor.setRepositoryQueryLayer( queryLayer );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 1, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_GROUP_ID, result.getReason() );
- }
-
- public void testValidArtifactWithInvalidDependencyArtifactId()
- {
- MockArtifactFactory artifactFactory = new MockArtifactFactory();
- processor.setArtifactFactory( artifactFactory );
-
- setRequiredElements( artifact, VALID, VALID, VALID );
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- Dependency dependency = new Dependency();
- setRequiredElements( dependency, VALID, null, VALID );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- processor.setRepositoryQueryLayer( queryLayer );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 1, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_ARTIFACT_ID, result.getReason() );
- }
-
- public void testValidArtifactWithInvalidDependencyVersion()
- {
- MockArtifactFactory artifactFactory = new MockArtifactFactory();
- processor.setArtifactFactory( artifactFactory );
-
- setRequiredElements( artifact, VALID, VALID, VALID );
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- Dependency dependency = new Dependency();
- setRequiredElements( dependency, VALID, VALID, null );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- processor.setRepositoryQueryLayer( queryLayer );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 1, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_VERSION, result.getReason() );
- }
-
- public void testValidArtifactWithInvalidDependencyRequiredElements()
- {
- MockArtifactFactory artifactFactory = new MockArtifactFactory();
- processor.setArtifactFactory( artifactFactory );
-
- setRequiredElements( artifact, VALID, VALID, VALID );
- MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- Dependency dependency = new Dependency();
- setRequiredElements( dependency, null, null, null );
- model.addDependency( dependency );
- queryLayer.addReturnValue( ARTIFACT_FOUND );
-
- processor.setRepositoryQueryLayer( queryLayer );
- processor.processArtifact( model, artifact, reporter, null );
- assertEquals( 1, reporter.getSuccesses() );
- assertEquals( 3, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
-
- Iterator failures = reporter.getArtifactFailureIterator();
- ArtifactResult result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_GROUP_ID, result.getReason() );
- result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_ARTIFACT_ID, result.getReason() );
- result = (ArtifactResult) failures.next();
- assertEquals( ArtifactReporter.EMPTY_DEPENDENCY_VERSION, result.getReason() );
- }
-
- protected void tearDown()
- throws Exception
- {
- model = null;
- artifact = null;
- reporter = null;
- super.tearDown();
- }
-
- private void setRequiredElements( Artifact artifact, String groupId, String artifactId, String version )
- {
- artifact.setGroupId( groupId );
- artifact.setArtifactId( artifactId );
- artifact.setVersion( version );
- }
-
- private void setRequiredElements( Dependency dependency, String groupId, String artifactId, String version )
- {
- dependency.setGroupId( groupId );
- dependency.setArtifactId( artifactId );
- dependency.setVersion( version );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.apache.maven.model.Model;
-
-import java.util.Iterator;
-
-/**
- *
- */
-public class ArtifactReporterTest
- extends AbstractRepositoryReportsTestCase
-{
- private ArtifactReporter reporter;
-
- private Artifact artifact;
-
- private MockArtifactReportProcessor processor;
-
- private Model model;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
- reporter = new DefaultArtifactReporter();
- ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
- artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
- processor = new MockArtifactReportProcessor();
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.setLastUpdated( "20050611.202020" );
- model = new Model();
- }
-
- public void testArtifactReporterSingleSuccess()
- {
- processor.addReturnValue( ReportCondition.SUCCESS, artifact, "all is good" );
- processor.processArtifact( model, artifact, reporter, null );
- Iterator success = reporter.getArtifactSuccessIterator();
- assertTrue( success.hasNext() );
- assertEquals( 1, reporter.getSuccesses() );
- Artifact result = ( (ArtifactResult) success.next() ).getArtifact();
- assertEquals( "groupId", result.getGroupId() );
- assertEquals( "artifactId", result.getArtifactId() );
- assertEquals( "1.0-alpha-1", result.getVersion() );
- assertFalse( success.hasNext() );
- }
-
- public void testArtifactReporterMultipleSuccess()
- {
- processor.clearList();
- processor.addReturnValue( ReportCondition.SUCCESS, artifact, "one" );
- processor.addReturnValue( ReportCondition.SUCCESS, artifact, "two" );
- processor.addReturnValue( ReportCondition.SUCCESS, artifact, "three" );
- reporter = new DefaultArtifactReporter();
- processor.processArtifact( model, artifact, reporter, null );
- Iterator success = reporter.getArtifactSuccessIterator();
- assertTrue( success.hasNext() );
- int i;
- for ( i = 0; success.hasNext(); i++ )
- {
- success.next();
- }
- assertEquals( 3, i );
- assertEquals( 3, reporter.getSuccesses() );
- assertEquals( 0, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- }
-
- public void testArtifactReporterSingleFailure()
- {
- processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed once" );
- processor.processArtifact( model, artifact, reporter, null );
- Iterator failure = reporter.getArtifactFailureIterator();
- assertTrue( failure.hasNext() );
- failure.next();
- assertFalse( failure.hasNext() );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 1, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- }
-
- public void testArtifactReporterMultipleFailure()
- {
- processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed once" );
- processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed twice" );
- processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed thrice" );
- processor.processArtifact( model, artifact, reporter, null );
- Iterator failure = reporter.getArtifactFailureIterator();
- assertTrue( failure.hasNext() );
- int i;
- for ( i = 0; failure.hasNext(); i++ )
- {
- failure.next();
- }
- assertEquals( 3, i );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 3, reporter.getFailures() );
- assertEquals( 0, reporter.getWarnings() );
- }
-
- public void testFailureMessages()
- {
- processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed once" );
- processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed twice" );
- processor.addReturnValue( ReportCondition.FAILURE, artifact, "failed thrice" );
- processor.processArtifact( model, artifact, reporter, null );
- Iterator failure = reporter.getArtifactFailureIterator();
- assertEquals( "failed once", ( (ArtifactResult) failure.next() ).getReason() );
- assertEquals( "failed twice", ( (ArtifactResult) failure.next() ).getReason() );
- assertEquals( "failed thrice", ( (ArtifactResult) failure.next() ).getReason() );
- }
-
- public void testArtifactReporterSingleWarning()
- {
- processor.addReturnValue( ReportCondition.WARNING, artifact, "you've been warned" );
- processor.processArtifact( model, artifact, reporter, null );
- Iterator warning = reporter.getArtifactWarningIterator();
- assertTrue( warning.hasNext() );
- warning.next();
- assertFalse( warning.hasNext() );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 0, reporter.getFailures() );
- assertEquals( 1, reporter.getWarnings() );
- }
-
- public void testArtifactReporterMultipleWarning()
- {
- processor.addReturnValue( ReportCondition.WARNING, artifact, "i'm warning you" );
- processor.addReturnValue( ReportCondition.WARNING, artifact, "you have to stop now" );
- processor.addReturnValue( ReportCondition.WARNING, artifact, "all right... that does it!" );
- processor.processArtifact( model, artifact, reporter, null );
- Iterator warning = reporter.getArtifactWarningIterator();
- assertTrue( warning.hasNext() );
- int i;
- for ( i = 0; warning.hasNext(); i++ )
- {
- warning.next();
- }
- assertEquals( 3, i );
- assertEquals( 0, reporter.getSuccesses() );
- assertEquals( 0, reporter.getFailures() );
- assertEquals( 3, reporter.getWarnings() );
- }
-
- public void testWarningMessages()
- {
- processor.addReturnValue( ReportCondition.WARNING, artifact, "i'm warning you" );
- processor.addReturnValue( ReportCondition.WARNING, artifact, "you have to stop now" );
- processor.addReturnValue( ReportCondition.WARNING, artifact, "all right... that does it!" );
- processor.processArtifact( model, artifact, reporter, null );
- Iterator warning = reporter.getArtifactWarningIterator();
- assertEquals( "i'm warning you", ( (ArtifactResult) warning.next() ).getReason() );
- assertEquals( "you have to stop now", ( (ArtifactResult) warning.next() ).getReason() );
- assertEquals( "all right... that does it!", ( (ArtifactResult) warning.next() ).getReason() );
- }
-
- protected void tearDown()
- throws Exception
- {
- model = null;
- processor.clearList();
- processor = null;
- reporter = null;
- super.tearDown();
- }
-
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Plugin;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-
-import java.util.Iterator;
-
-/**
- * @todo??? should use MetadataXpp3Reader instead ?
- */
-public class BadMetadataReportProcessorTest
- extends AbstractRepositoryReportsTestCase
-{
- private ArtifactFactory artifactFactory;
-
- private MetadataReportProcessor badMetadataReportProcessor;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
-
- badMetadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE );
- }
-
- public void testMetadataMissingLastUpdated()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.addVersion( "1.0-alpha-2" );
-
- RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
- assertEquals( "check reason", "Missing lastUpdated element inside the metadata.", result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testMetadataValidVersions()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.addVersion( "1.0-alpha-2" );
- versioning.setLastUpdated( "20050611.202020" );
-
- RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertFalse( "check there are no failures", failures.hasNext() );
- }
-
- public void testMetadataMissingADirectory()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.setLastUpdated( "20050611.202020" );
-
- RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
- // TODO: should be more robust
- assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testMetadataInvalidArtifactVersion()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.addVersion( "1.0-alpha-2" );
- versioning.addVersion( "1.0-alpha-3" );
- versioning.setLastUpdated( "20050611.202020" );
-
- RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
- // TODO: should be more robust
- assertEquals( "check reason",
- "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
- result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testMoreThanOneMetadataVersionErrors()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- Artifact artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.addVersion( "1.0-alpha-3" );
- versioning.setLastUpdated( "20050611.202020" );
-
- RepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact, versioning );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
- // TODO: should be more robust
- assertEquals( "check reason",
- "Artifact version 1.0-alpha-3 is present in metadata but missing in the repository.",
- result.getReason() );
- assertTrue( "check there is a 2nd failure", failures.hasNext() );
- result = (RepositoryMetadataResult) failures.next();
- // TODO: should be more robust
- assertEquals( "check reason",
- "Artifact version 1.0-alpha-2 found in the repository but missing in the metadata.",
- result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testValidPluginMetadata()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertFalse( "check there are no failures", failures.hasNext() );
- }
-
- public void testMissingMetadataPlugin()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "missing-plugin", "default3" ) );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- // TODO: should be more robust
- assertEquals( "check reason", "Metadata plugin missing-plugin not found in the repository",
- result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testIncompletePluginMetadata()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- // TODO: should be more robust
- assertEquals( "check reason",
- "Plugin snapshot-artifact is present in the repository but " + "missing in the metadata.",
- result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testInvalidPluginArtifactId()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default2" ) );
- metadata.getMetadata().addPlugin( createMetadataPlugin( null, "default3" ) );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "", "default4" ) );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- // TODO: should be more robust
- assertEquals( "check reason", "Missing or empty artifactId in group metadata.", result.getReason() );
- assertTrue( "check there is a 2nd failure", failures.hasNext() );
- result = (RepositoryMetadataResult) failures.next();
- // TODO: should be more robust
- assertEquals( "check reason", "Missing or empty artifactId in group metadata.", result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testInvalidPluginPrefix()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", null ) );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "" ) );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- // TODO: should be more robust
- assertEquals( "check reason", "Missing or empty plugin prefix for artifactId artifactId.", result.getReason() );
- assertTrue( "check there is a 2nd failure", failures.hasNext() );
- result = (RepositoryMetadataResult) failures.next();
- // TODO: should be more robust
- assertEquals( "check reason", "Missing or empty plugin prefix for artifactId snapshot-artifact.",
- result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testDuplicatePluginPrefixes()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- RepositoryMetadata metadata = new GroupRepositoryMetadata( "groupId" );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "artifactId", "default" ) );
- metadata.getMetadata().addPlugin( createMetadataPlugin( "snapshot-artifact", "default" ) );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- // TODO: should be more robust
- assertEquals( "check reason", "Duplicate plugin prefix found: default.", result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- public void testValidSnapshotMetadata()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- Artifact artifact =
- artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
-
- Snapshot snapshot = new Snapshot();
- snapshot.setBuildNumber( 1 );
- snapshot.setTimestamp( "20050611.202024" );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertFalse( "check there are no failures", failures.hasNext() );
- }
-
- public void testInvalidSnapshotMetadata()
- throws ReportProcessorException
- {
- ArtifactReporter reporter = new MockArtifactReporter();
-
- Artifact artifact =
- artifactFactory.createBuildArtifact( "groupId", "snapshot-artifact", "1.0-alpha-1-SNAPSHOT", "type" );
-
- Snapshot snapshot = new Snapshot();
- snapshot.setBuildNumber( 2 );
- snapshot.setTimestamp( "20050611.202024" );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact, snapshot );
-
- badMetadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator failures = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check there is a failure", failures.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) failures.next();
- assertEquals( "check metadata", metadata, result.getMetadata() );
- // TODO: should be more robust
- assertEquals( "check reason", "Snapshot artifact 20050611.202024-2 does not exist.", result.getReason() );
- assertFalse( "check no more failures", failures.hasNext() );
- }
-
- private Plugin createMetadataPlugin( String artifactId, String prefix )
- {
- Plugin plugin = new Plugin();
- plugin.setArtifactId( artifactId );
- plugin.setName( artifactId );
- plugin.setPrefix( prefix );
- return plugin;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import junit.framework.TestCase;
-
-/**
- *
- */
-public class CacheTest
- extends TestCase
-{
- private Cache cache;
-
- private static final double CACHE_HIT_RATIO = 0.5;
-
- private static final double CACHE_HIT_RATIO_THRESHOLD = 0.75;
-
- public void testCacheManagementBasedOnHitsRatio()
- {
- cache = new Cache( CACHE_HIT_RATIO );
- newCacheObjectTests();
-
- String key = "key";
- String value = "value";
- for ( int ctr = 1; ctr < 10; ctr++ )
- {
- cache.put( key + ctr, value + ctr );
- }
-
- while ( cache.getHitRate() < CACHE_HIT_RATIO_THRESHOLD )
- {
- cache.get( "key2" );
- }
- cache.put( "key10", "value10" );
- assertNull( "first key must be expired", cache.get( "key1" ) );
- }
-
- public void testCacheManagementBasedOnCacheSize()
- {
- cache = new Cache( 9 );
- newCacheObjectTests();
-
- String key = "key";
- String value = "value";
- for ( int ctr = 1; ctr < 10; ctr++ )
- {
- cache.put( key + ctr, value + ctr );
- }
-
- cache.put( "key10", "value10" );
- assertNull( "first key must be expired", cache.get( "key1" ) );
- assertEquals( "check cache size to be max size", 9, cache.size() );
- }
-
- public void testCacheManagementBasedOnCacheSizeAndHitRate()
- {
- cache = new Cache( CACHE_HIT_RATIO, 9 );
- newCacheObjectTests();
-
- String key = "key";
- String value = "value";
- for ( int ctr = 1; ctr < 5; ctr++ )
- {
- cache.put( key + ctr, value + ctr );
- }
-
- while ( cache.getHitRate() < CACHE_HIT_RATIO )
- {
- cache.get( "key3" );
- }
-
- cache.put( "key10", "value10" );
- assertNull( "first key must be expired", cache.get( "key1" ) );
-
- while ( cache.getHitRate() >= CACHE_HIT_RATIO )
- {
- cache.get( "key11" );
- }
-
- for ( int ctr = 5; ctr < 10; ctr++ )
- {
- cache.put( key + ctr, value + ctr );
- }
-
- cache.put( "key11", "value11" );
- assertNull( "second key must be expired", cache.get( "key2" ) );
- assertEquals( "check cache size to be max size", 9, cache.size() );
- }
-
- public void testCacheOnRedundantData()
- {
- cache = new Cache( CACHE_HIT_RATIO, 9 );
- newCacheObjectTests();
-
- String key = "key";
- String value = "value";
- for ( int ctr = 1; ctr < 10; ctr++ )
- {
- cache.put( key + ctr, value + ctr );
- }
-
- cache.put( "key1", "value1" );
- cache.put( "key10", "value10" );
- assertNull( "second key must be gone", cache.get( "key2" ) );
- assertEquals( "check cache size to be max size", 9, cache.size() );
- }
-
- private void newCacheObjectTests()
- {
- assertEquals( (double) 0, cache.getHitRate(), 0 );
- assertEquals( "check cache size", 0, cache.size() );
-
- String value = "value";
- String key = "key";
-
- cache.put( key, value );
- assertEquals( "check cache hit", value, cache.get( key ) );
- assertEquals( (double) 1, cache.getHitRate(), 0 );
- assertEquals( "check cache size", 1, cache.size() );
- assertNull( "check cache miss", cache.get( "none" ) );
- assertEquals( CACHE_HIT_RATIO, cache.getHitRate(), 0 );
- cache.clear();
- assertNull( "check flushed object", cache.get( "key" ) );
- assertEquals( (double) 0, cache.getHitRate(), 0 );
- assertEquals( "check flushed cache size", 0, cache.size() );
- cache.clear();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- *
- */
-public class CachedRepositoryQueryLayerTest
- extends AbstractRepositoryQueryLayerTestCase
-{
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- queryLayer = new CachedRepositoryQueryLayer( repository );
- }
-
- public void testUseFileCache()
- {
- testContainsArtifactTrue();
- assertEquals( 0, queryLayer.getCacheHitRate(), 0 );
- testContainsArtifactTrue();
- assertEquals( CachedRepositoryQueryLayer.CACHE_HIT_RATIO, queryLayer.getCacheHitRate(), 0 );
- }
-
- public void testUseMetadataCache()
- throws Exception
- {
- testArtifactVersionsTrue();
- assertEquals( 0, queryLayer.getCacheHitRate(), 0 );
- testArtifactVersionsTrue();
- assertEquals( CachedRepositoryQueryLayer.CACHE_HIT_RATIO, queryLayer.getCacheHitRate(), 0 );
- }
-
- public void testUseFileCacheOnSnapshot()
- {
- testContainsSnapshotArtifactTrue();
- assertEquals( 0, queryLayer.getCacheHitRate(), 0 );
- testContainsSnapshotArtifactTrue();
- assertEquals( CachedRepositoryQueryLayer.CACHE_HIT_RATIO, queryLayer.getCacheHitRate(), 0 );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.handler.ArtifactHandler;
-import org.apache.maven.artifact.handler.DefaultArtifactHandler;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.GroupRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.apache.maven.repository.digest.DigesterException;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-
-/**
- * This class tests the ChecksumArtifactReporter.
- * It extends the AbstractChecksumArtifactReporterTestCase class.
- */
-public class ChecksumArtifactReporterTest
- extends AbstractChecksumArtifactReporterTestCase
-{
- private ArtifactReportProcessor artifactReportProcessor;
-
- private ArtifactReporter reporter = new MockArtifactReporter();
-
- private MetadataReportProcessor metadataReportProcessor;
-
- public void setUp()
- throws Exception
- {
- super.setUp();
- artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "checksum" );
- metadataReportProcessor = (MetadataReportProcessor) lookup( MetadataReportProcessor.ROLE, "checksum-metadata" );
- }
-
- /**
- * Test the ChecksumArtifactReporter when the checksum files are valid.
- */
- public void testChecksumArtifactReporterSuccess()
- throws ReportProcessorException, IOException, DigesterException
- {
- createChecksumFile( "VALID" );
- createChecksumFile( "INVALID" );
-
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
-
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 2, reporter.getSuccesses() );
- }
-
- /**
- * Test the ChecksumArtifactReporter when the checksum files are invalid.
- */
- public void testChecksumArtifactReporterFailed()
- throws ReportProcessorException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "invalidArtifact", version, "compile", "jar", "", handler );
-
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 2, reporter.getFailures() );
- }
-
- /**
- * Test the valid checksum of a metadata file.
- * The reporter should report 2 success validation.
- */
- public void testChecksumMetadataReporterSuccess()
- throws ReportProcessorException, DigesterException, IOException
- {
- createMetadataFile( "VALID" );
- createMetadataFile( "INVALID" );
-
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
-
- //Version level metadata
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- //Artifact level metadata
- metadata = new ArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- //Group level metadata
- metadata = new GroupRepositoryMetadata( "checksumTest" );
- metadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator iter = reporter.getRepositoryMetadataSuccessIterator();
- assertTrue( "check if there is a success", iter.hasNext() );
- }
-
- /**
- * Test the corrupted checksum of a metadata file.
- * The reporter must report 2 failures.
- */
- public void testChecksumMetadataReporterFailure()
- throws ReportProcessorException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "invalidArtifact", version, "compile", "jar", "", handler );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator iter = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check if there is a failure", iter.hasNext() );
- }
-
- /**
- * Test the checksum of an artifact located in a remote location.
- */
- /* public void testChecksumArtifactRemote()
- {
- ArtifactHandler handler = new DefaultArtifactHandler( remoteArtifactType );
- VersionRange version = VersionRange.createFromVersion( remoteArtifactVersion );
- Artifact artifact = new DefaultArtifact( remoteArtifactGroup, remoteArtifactId, version, remoteArtifactScope,
- remoteArtifactType, "", handler );
- ArtifactRepository repository = new DefaultArtifactRepository( remoteRepoId, remoteRepoUrl,
- new DefaultRepositoryLayout() );
-
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- if ( reporter.getFailures() == 2 )
- assertTrue( reporter.getFailures() == 2 );
-
- if ( reporter.getSuccesses() == 2 )
- assertTrue( reporter.getSuccesses() == 2 );
-
- }
- */
-
- /**
- * Test the checksum of a metadata file located in a remote location.
- */
- /* public void testChecksumMetadataRemote()
- {
-
- try
- {
- ArtifactHandler handler = new DefaultArtifactHandler( remoteArtifactType );
- VersionRange version = VersionRange.createFromVersion( remoteArtifactVersion );
- Artifact artifact = new DefaultArtifact( remoteArtifactGroup, remoteArtifactId, version,
- remoteArtifactScope, remoteArtifactType, "", handler );
- ArtifactRepository repository = new DefaultArtifactRepository( remoteRepoId, remoteRepoUrl,
- new DefaultRepositoryLayout() );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
-
- metadataReportProcessor.processMetadata( metadata, repository, reporter );
- Iterator iter = reporter.getRepositoryMetadataFailureIterator();
- if ( iter.hasNext() )
- assertTrue( "check if there is a failure", iter.hasNext() );
-
- iter = reporter.getRepositoryMetadataSuccessIterator();
- if ( iter.hasNext() )
- assertTrue( "check if there is a success", iter.hasNext() );
-
- }
- catch ( Exception e )
- {
- e.printStackTrace();
- }
- }
- */
-
- /**
- * Test the conditional when the checksum files of the artifact & metadata do not exist.
- */
- public void testChecksumFilesDoNotExist()
- throws ReportProcessorException, DigesterException, IOException
- {
- createChecksumFile( "VALID" );
- createMetadataFile( "VALID" );
- deleteChecksumFiles( "jar" );
-
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0" );
- Artifact artifact =
- new DefaultArtifact( "checksumTest", "validArtifact", version, "compile", "jar", "", handler );
-
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 2, reporter.getFailures() );
-
- RepositoryMetadata metadata = new SnapshotArtifactRepositoryMetadata( artifact );
- metadataReportProcessor.processMetadata( metadata, repository, reporter );
-
- Iterator iter = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "check if there is a failure", iter.hasNext() );
-
- deleteTestDirectory( new File( repository.getBasedir() + "checksumTest" ) );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-
-import java.util.Iterator;
-
-/**
- *
- */
-public class DefaultArtifactReporterTest
- extends AbstractRepositoryReportsTestCase
-{
- private ArtifactReporter reporter;
-
- private Artifact artifact;
-
- private RepositoryMetadata metadata;
-
- public void testEmptyArtifactReporter()
- {
- assertEquals( "No failures", 0, reporter.getFailures() );
- assertEquals( "No warnings", 0, reporter.getWarnings() );
- assertEquals( "No successes", 0, reporter.getSuccesses() );
- assertFalse( "No artifact failures", reporter.getArtifactFailureIterator().hasNext() );
- assertFalse( "No artifact warnings", reporter.getArtifactWarningIterator().hasNext() );
- assertFalse( "No artifact successes", reporter.getArtifactSuccessIterator().hasNext() );
- assertFalse( "No metadata failures", reporter.getRepositoryMetadataFailureIterator().hasNext() );
- assertFalse( "No metadata warnings", reporter.getRepositoryMetadataWarningIterator().hasNext() );
- assertFalse( "No metadata successes", reporter.getRepositoryMetadataSuccessIterator().hasNext() );
- }
-
- public void testMetadataSingleFailure()
- {
- reporter.addFailure( metadata, "Single Failure Reason" );
- assertEquals( "failures count", 1, reporter.getFailures() );
- assertEquals( "warnings count", 0, reporter.getWarnings() );
- assertEquals( "successes count", 0, reporter.getSuccesses() );
-
- Iterator results = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "must have failures", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
- assertEquals( "check failure reason", "Single Failure Reason", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
- }
-
- public void testMetadataMultipleFailures()
- {
- reporter.addFailure( metadata, "First Failure Reason" );
- reporter.addFailure( metadata, "Second Failure Reason" );
- assertEquals( "failures count", 2, reporter.getFailures() );
- assertEquals( "warnings count", 0, reporter.getWarnings() );
- assertEquals( "successes count", 0, reporter.getSuccesses() );
-
- Iterator results = reporter.getRepositoryMetadataFailureIterator();
- assertTrue( "must have failures", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
- assertEquals( "check failure reason", "First Failure Reason", result.getReason() );
- assertTrue( "must have 2nd failure", results.hasNext() );
- result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
- assertEquals( "check failure reason", "Second Failure Reason", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
- }
-
- public void testMetadataSingleWarning()
- {
- reporter.addWarning( metadata, "Single Warning Message" );
- assertEquals( "failures count", 0, reporter.getFailures() );
- assertEquals( "warnings count", 1, reporter.getWarnings() );
- assertEquals( "successes count", 0, reporter.getSuccesses() );
-
- Iterator results = reporter.getRepositoryMetadataWarningIterator();
- assertTrue( "must have failures", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
- assertEquals( "check failure reason", "Single Warning Message", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
- }
-
- public void testMetadataMultipleWarnings()
- {
- reporter.addWarning( metadata, "First Warning" );
- reporter.addWarning( metadata, "Second Warning" );
- assertEquals( "failures count", 0, reporter.getFailures() );
- assertEquals( "warnings count", 2, reporter.getWarnings() );
- assertEquals( "successes count", 0, reporter.getSuccesses() );
-
- Iterator results = reporter.getRepositoryMetadataWarningIterator();
- assertTrue( "must have warnings", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
- assertEquals( "check failure reason", "First Warning", result.getReason() );
- assertTrue( "must have 2nd warning", results.hasNext() );
- result = (RepositoryMetadataResult) results.next();
- assertEquals( "check failure cause", metadata, result.getMetadata() );
- assertEquals( "check failure reason", "Second Warning", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
- }
-
- public void testMetadataSingleSuccess()
- {
- reporter.addSuccess( metadata );
- assertEquals( "failures count", 0, reporter.getFailures() );
- assertEquals( "warnings count", 0, reporter.getWarnings() );
- assertEquals( "successes count", 1, reporter.getSuccesses() );
-
- Iterator results = reporter.getRepositoryMetadataSuccessIterator();
- assertTrue( "must have successes", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check success metadata", metadata, result.getMetadata() );
- assertNull( "check no reason", result.getReason() );
- assertFalse( "no more failures", results.hasNext() );
- }
-
- public void testMetadataMultipleSuccesses()
- {
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-beta-1" );
- versioning.addVersion( "1.0-beta-2" );
- RepositoryMetadata metadata2 = new ArtifactRepositoryMetadata( artifact, versioning );
-
- reporter.addSuccess( metadata );
- reporter.addSuccess( metadata2 );
- assertEquals( "failures count", 0, reporter.getFailures() );
- assertEquals( "warnings count", 0, reporter.getWarnings() );
- assertEquals( "successes count", 2, reporter.getSuccesses() );
-
- Iterator results = reporter.getRepositoryMetadataSuccessIterator();
- assertTrue( "must have successes", results.hasNext() );
- RepositoryMetadataResult result = (RepositoryMetadataResult) results.next();
- assertEquals( "check success metadata", metadata, result.getMetadata() );
- assertNull( "check no reason", result.getReason() );
- assertTrue( "must have 2nd success", results.hasNext() );
- result = (RepositoryMetadataResult) results.next();
- assertEquals( "check success metadata", metadata2, result.getMetadata() );
- assertNull( "check no reason", result.getReason() );
- assertFalse( "no more successes", results.hasNext() );
- }
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- reporter = new DefaultArtifactReporter();
- ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
- artifact = artifactFactory.createBuildArtifact( "groupId", "artifactId", "1.0-alpha-1", "type" );
-
- Versioning versioning = new Versioning();
- versioning.addVersion( "1.0-alpha-1" );
- versioning.addVersion( "1.0-alpha-2" );
- }
-
- protected void tearDown()
- throws Exception
- {
- super.tearDown();
-
- reporter = null;
- metadata = null;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.model.Model;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.record.RepositoryIndexRecordFactory;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.File;
-import java.util.Collections;
-
-/**
- * @author Edwin Punzalan
- */
-public class DuplicateArtifactFileReportProcessorTest
- extends AbstractRepositoryReportsTestCase
-{
- private Artifact artifact;
-
- private Model model;
-
- private ArtifactReportProcessor processor;
-
- private ArtifactFactory artifactFactory;
-
- File indexDirectory;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- indexDirectory = getTestFile( "target/indexDirectory" );
- FileUtils.deleteDirectory( indexDirectory );
-
- artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
- artifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "jar" );
- model = new Model();
-
- RepositoryArtifactIndexFactory factory =
- (RepositoryArtifactIndexFactory) lookup( RepositoryArtifactIndexFactory.ROLE, "lucene" );
-
- RepositoryArtifactIndex index = factory.createStandardIndex( indexDirectory );
-
- RepositoryIndexRecordFactory recordFactory =
- (RepositoryIndexRecordFactory) lookup( RepositoryIndexRecordFactory.ROLE, "standard" );
-
- index.indexRecords( Collections.singletonList( recordFactory.createRecord( artifact ) ) );
-
- processor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "duplicate" );
- }
-
- public void testNullArtifactFile()
- throws Exception
- {
- artifact.setFile( null );
-
- MockArtifactReporter reporter = new MockArtifactReporter();
-
- processor.processArtifact( model, artifact, reporter, repository );
-
- assertEquals( "Check no successes", 0, reporter.getSuccesses() );
- assertEquals( "Check warnings", 1, reporter.getWarnings() );
- assertEquals( "Check no failures", 0, reporter.getFailures() );
- }
-
- public void testSuccessOnAlreadyIndexedArtifact()
- throws Exception
- {
- MockArtifactReporter reporter = new MockArtifactReporter();
-
- processor.processArtifact( model, artifact, reporter, repository );
-
- assertEquals( "Check no successes", 1, reporter.getSuccesses() );
- assertEquals( "Check warnings", 0, reporter.getWarnings() );
- assertEquals( "Check no failures", 0, reporter.getFailures() );
- }
-
- public void testSuccessOnDifferentGroupId()
- throws Exception
- {
- MockArtifactReporter reporter = new MockArtifactReporter();
-
- artifact.setGroupId( "different.groupId" );
- processor.processArtifact( model, artifact, reporter, repository );
-
- assertEquals( "Check no successes", 1, reporter.getSuccesses() );
- assertEquals( "Check warnings", 0, reporter.getWarnings() );
- assertEquals( "Check no failures", 0, reporter.getFailures() );
- }
-
- public void testSuccessOnNewArtifact()
- throws Exception
- {
- Artifact newArtifact = createArtifact( "groupId", "artifactId", "1.0-alpha-1", "1.0-alpha-1", "pom" );
-
- MockArtifactReporter reporter = new MockArtifactReporter();
-
- processor.processArtifact( model, newArtifact, reporter, repository );
-
- assertEquals( "Check no successes", 1, reporter.getSuccesses() );
- assertEquals( "Check warnings", 0, reporter.getWarnings() );
- assertEquals( "Check no failures", 0, reporter.getFailures() );
- }
-
- public void testFailure()
- throws Exception
- {
- Artifact duplicate = createArtifact( artifact.getGroupId(), "snapshot-artifact", "1.0-alpha-1-SNAPSHOT",
- artifact.getVersion(), artifact.getType() );
- duplicate.setFile( artifact.getFile() );
-
- MockArtifactReporter reporter = new MockArtifactReporter();
-
- processor.processArtifact( model, duplicate, reporter, repository );
-
- assertEquals( "Check no successes", 0, reporter.getSuccesses() );
- assertEquals( "Check warnings", 0, reporter.getWarnings() );
- assertEquals( "Check no failures", 1, reporter.getFailures() );
- }
-
- private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
- String type )
- {
- Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
- artifact.setBaseVersion( baseVersion );
- artifact.setRepository( repository );
- artifact.setFile( new File( repository.getBasedir(), repository.pathOf( artifact ) ) );
- return artifact;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.Method;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Edwin Punzalan
- */
-public class GenericMockObject
- implements InvocationHandler
-{
- private Map invocations = new HashMap();
-
- public GenericMockObject()
- {
- //default constructor
- }
-
- public GenericMockObject( Map returnMap )
- {
- invocations = new HashMap( returnMap );
- }
-
- public void setExpectedReturns( Method method, List returnList )
- {
- invocations.put( method, returnList );
- }
-
- public Object invoke( Object proxy, Method method, Object[] args )
- {
- if ( !invocations.containsKey( method ) )
- {
- throw new UnsupportedOperationException( "No expected return values defined." );
- }
-
- List returnList = (List) invocations.get( method );
- if ( returnList.size() < 1 )
- {
- throw new UnsupportedOperationException( "Too few expected return values defined." );
- }
- return returnList.remove( 0 );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.handler.ArtifactHandler;
-import org.apache.maven.artifact.handler.DefaultArtifactHandler;
-import org.apache.maven.artifact.versioning.VersionRange;
-
-/**
- * This class tests the InvalidPomArtifactReportProcessor class.
- */
-public class InvalidPomArtifactReportProcessorTest
- extends AbstractRepositoryReportsTestCase
-{
- private ArtifactReportProcessor artifactReportProcessor;
-
- private ArtifactReporter reporter = new MockArtifactReporter();
-
- public void setUp()
- throws Exception
- {
- super.setUp();
- artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "invalid-pom" );
- }
-
- /**
- * Test the InvalidPomArtifactReportProcessor when the artifact is an invalid pom.
- */
- public void testInvalidPomArtifactReportProcessorFailure()
- throws ReportProcessorException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "pom" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-3" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "artifactId", version, "compile", "pom", "", handler );
-
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 1, reporter.getFailures() );
- }
-
-
- /**
- * Test the InvalidPomArtifactReportProcessor when the artifact is a valid pom.
- */
- public void testInvalidPomArtifactReportProcessorSuccess()
- throws ReportProcessorException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "pom" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-2" );
- Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "pom", "", handler );
-
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 1, reporter.getSuccesses() );
- }
-
-
- /**
- * Test the InvalidPomArtifactReportProcessor when the artifact is not a pom.
- */
- public void testNotAPomArtifactReportProcessorSuccess()
- throws ReportProcessorException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-1" );
- Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
-
- artifactReportProcessor.processArtifact( null, artifact, reporter, repository );
- assertEquals( 1, reporter.getWarnings() );
- }
-
- /**
- * Test the InvalidPomArtifactReportProcessor when the pom is located in
- * a remote repository.
- */
- /* public void testRemotePomArtifactReportProcessorSuccess(){
- try{
- ArtifactHandler handler = new DefaultArtifactHandler( "pom" );
- VersionRange version = VersionRange.createFromVersion( remoteArtifactVersion );
- Artifact artifact = new DefaultArtifact( remoteArtifactGroup, remoteArtifactId, version, remoteArtifactScope,
- "pom", "", handler );
- ArtifactRepository repository = new DefaultArtifactRepository( remoteRepoId, remoteRepoUrl,
- new DefaultRepositoryLayout() );
-
- artifactReportProcessor.processArtifact(null, artifact, reporter, repository);
- if(reporter.getSuccesses() == 1)
- assertTrue(reporter.getSuccesses() == 1);
-
- }catch(Exception e){
-
- }
- }
- */
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.DefaultArtifact;
-import org.apache.maven.artifact.handler.ArtifactHandler;
-import org.apache.maven.artifact.handler.DefaultArtifactHandler;
-import org.apache.maven.artifact.versioning.VersionRange;
-import org.apache.maven.model.Model;
-import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.Reader;
-
-/**
- * This class tests the LocationArtifactReportProcessor.
- */
-public class LocationArtifactReportProcessorTest
- extends AbstractRepositoryReportsTestCase
-{
- private ArtifactReportProcessor artifactReportProcessor;
-
- private ArtifactReporter reporter = new MockArtifactReporter();
-
- private MavenXpp3Reader pomReader;
-
- public void setUp()
- throws Exception
- {
- super.setUp();
- artifactReportProcessor = (ArtifactReportProcessor) lookup( ArtifactReportProcessor.ROLE, "artifact-location" );
- pomReader = new MavenXpp3Reader();
- }
-
- public void tearDown()
- throws Exception
- {
- super.tearDown();
- artifactReportProcessor = null;
- pomReader = null;
- }
-
- /**
- * Test the LocationArtifactReporter when the artifact's physical location matches the location specified
- * both in the file system pom and in the pom included in the package.
- */
- public void testPackagedPomLocationArtifactReporterSuccess()
- throws ReportProcessorException, IOException, XmlPullParserException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "2.0" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "maven-model", version, "compile", "jar", "", handler );
-
- String path = "org/apache/maven/maven-model/2.0/maven-model-2.0.pom";
- Model model = readPom( path );
-
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getSuccesses() );
- }
-
- /**
- * Test the LocationArtifactReporter when the artifact is in the location specified in the
- * file system pom (but the jar file does not have a pom included in its package).
- */
- public void testLocationArtifactReporterSuccess()
- throws ReportProcessorException, IOException, XmlPullParserException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-1" );
- Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
-
- String path = "groupId/artifactId/1.0-alpha-1/artifactId-1.0-alpha-1.pom";
- Model model = readPom( path );
-
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getSuccesses() );
- }
-
- /**
- * Test the LocationArtifactReporter when the artifact is not in the location specified
- * in the file system pom.
- */
- public void testLocationArtifactReporterFailure()
- throws IOException, XmlPullParserException, ReportProcessorException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "1.0-alpha-2" );
- Artifact artifact = new DefaultArtifact( "groupId", "artifactId", version, "compile", "jar", "", handler );
-
- String path = "groupId/artifactId/1.0-alpha-2/artifactId-1.0-alpha-2.pom";
- Model model = readPom( path );
-
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getFailures() );
- }
-
- /**
- * Test the LocationArtifactReporter when the artifact's physical location does not match the
- * location in the file system pom but instead matches the specified location in the packaged pom.
- */
- public void testFsPomArtifactMatchFailure()
- throws IOException, ReportProcessorException, XmlPullParserException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "2.0" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "maven-archiver", version, "compile", "jar", "", handler );
-
- String path = "org/apache/maven/maven-archiver/2.0/maven-archiver-2.0.pom";
- Model model = readPom( path );
-
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getFailures() );
- }
-
- private Model readPom( String path )
- throws IOException, XmlPullParserException
- {
- Reader reader = new FileReader( new File( repository.getBasedir(), path ) );
- Model model = pomReader.read( reader );
- // hokey inheritence to avoid some errors right now
- if ( model.getGroupId() == null )
- {
- model.setGroupId( model.getParent().getGroupId() );
- }
- if ( model.getVersion() == null )
- {
- model.setVersion( model.getParent().getVersion() );
- }
- return model;
- }
-
- /**
- * Test the LocationArtifactReporter when the artifact's physical location does not match the
- * location specified in the packaged pom but matches the location specified in the file system pom.
- */
- public void testPkgPomArtifactMatchFailure()
- throws IOException, XmlPullParserException, ReportProcessorException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "2.1" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "maven-monitor", version, "compile", "jar", "", handler );
-
- String path = "org/apache/maven/maven-monitor/2.1/maven-monitor-2.1.pom";
- Model model = readPom( path );
-
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getFailures() );
- }
-
- /**
- * Test the LocationArtifactReporter when the artifact's physical location does not match both the
- * location specified in the packaged pom and the location specified in the file system pom.
- */
- public void testBothPomArtifactMatchFailure()
- throws IOException, XmlPullParserException, ReportProcessorException
- {
- ArtifactHandler handler = new DefaultArtifactHandler( "jar" );
- VersionRange version = VersionRange.createFromVersion( "2.1" );
- Artifact artifact =
- new DefaultArtifact( "org.apache.maven", "maven-project", version, "compile", "jar", "", handler );
-
- String path = "org/apache/maven/maven-project/2.1/maven-project-2.1.pom";
- Model model = readPom( path );
-
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- assertEquals( 1, reporter.getFailures() );
- }
-
- /**
- * Test the LocationArtifactReportProcessor when the artifact is located in the remote repository.
- */
- /* public void testRemoteArtifactReportProcessorFailure()
- {
-
- ArtifactHandler handler = new DefaultArtifactHandler( remoteArtifactType );
- VersionRange version = VersionRange.createFromVersion( remoteArtifactVersion );
- Artifact artifact = new DefaultArtifact( remoteArtifactGroup, remoteArtifactId, version, remoteArtifactScope,
- remoteArtifactType, "", handler );
- ArtifactRepository repository = new DefaultArtifactRepository( remoteRepoId, remoteRepoUrl,
- new DefaultRepositoryLayout() );
- try
- {
- URL url = new URL( remoteRepoUrl + remoteArtifactGroup + "/" + remoteArtifactId + "/"
- + remoteArtifactVersion + "/" + remoteArtifactId + "-" + remoteArtifactVersion + ".pom" );
- InputStream is = url.openStream();
- Reader reader = new InputStreamReader( is );
- Model model = pomReader.read( reader );
-
- artifactReportProcessor.processArtifact( model, artifact, reporter, repository );
- if ( reporter.getFailures() > 0 )
- assertTrue( reporter.getFailures() == 1 );
-
- if ( reporter.getSuccesses() > 0 )
- assertTrue( reporter.getSuccesses() == 1 );
-
- }
- catch ( Exception e )
- {
- e.printStackTrace();
- }
- }
- */
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.handler.ArtifactHandler;
-import org.apache.maven.artifact.metadata.ArtifactMetadata;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
-import org.apache.maven.artifact.versioning.ArtifactVersion;
-import org.apache.maven.artifact.versioning.OverConstrainedVersionException;
-import org.apache.maven.artifact.versioning.VersionRange;
-
-import java.io.File;
-import java.util.Collection;
-import java.util.List;
-
-/**
- * @noinspection ReturnOfNull
- */
-public class MockArtifact
- implements Artifact
-{
- private String groupId;
-
- private String artifactId;
-
- private String version;
-
- public String getGroupId()
- {
- return groupId;
- }
-
- public String getArtifactId()
- {
- return artifactId;
- }
-
- public String getVersion()
- {
- return version;
- }
-
- public void setVersion( String s )
- {
- version = s;
- }
-
- public String getScope()
- {
- return null;
- }
-
- public String getType()
- {
- return null;
- }
-
- public String getClassifier()
- {
- return null;
- }
-
- public boolean hasClassifier()
- {
- return false;
- }
-
- public File getFile()
- {
- return null;
- }
-
- public void setFile( File file )
- {
- }
-
- public String getBaseVersion()
- {
- return null;
- }
-
- public void setBaseVersion( String s )
- {
- }
-
- public String getId()
- {
- return null;
- }
-
- public String getDependencyConflictId()
- {
- return null;
- }
-
- public void addMetadata( ArtifactMetadata artifactMetadata )
- {
- }
-
- public Collection getMetadataList()
- {
- return null;
- }
-
- public void setRepository( ArtifactRepository artifactRepository )
- {
- }
-
- public ArtifactRepository getRepository()
- {
- return null;
- }
-
- public void updateVersion( String s, ArtifactRepository artifactRepository )
- {
- }
-
- public String getDownloadUrl()
- {
- return null;
- }
-
- public void setDownloadUrl( String s )
- {
- }
-
- public ArtifactFilter getDependencyFilter()
- {
- return null;
- }
-
- public void setDependencyFilter( ArtifactFilter artifactFilter )
- {
- }
-
- public ArtifactHandler getArtifactHandler()
- {
- return null;
- }
-
- public List getDependencyTrail()
- {
- return null;
- }
-
- public void setDependencyTrail( List list )
- {
- }
-
- public void setScope( String s )
- {
- }
-
- public VersionRange getVersionRange()
- {
- return null;
- }
-
- public void setVersionRange( VersionRange versionRange )
- {
- }
-
- public void selectVersion( String s )
- {
- }
-
- public void setGroupId( String s )
- {
- groupId = s;
- }
-
- public void setArtifactId( String s )
- {
- artifactId = s;
- }
-
- public boolean isSnapshot()
- {
- return false;
- }
-
- public void setResolved( boolean b )
- {
- }
-
- public boolean isResolved()
- {
- return false;
- }
-
- public void setResolvedVersion( String s )
- {
- }
-
- public void setArtifactHandler( ArtifactHandler artifactHandler )
- {
- }
-
- public boolean isRelease()
- {
- return false;
- }
-
- public void setRelease( boolean b )
- {
- }
-
- public List getAvailableVersions()
- {
- return null;
- }
-
- public void setAvailableVersions( List list )
- {
- }
-
- public boolean isOptional()
- {
- return false;
- }
-
- public ArtifactVersion getSelectedVersion()
- throws OverConstrainedVersionException
- {
- return null;
- }
-
- public boolean isSelectedVersionKnown()
- throws OverConstrainedVersionException
- {
- return false;
- }
-
- public int compareTo( Object o )
- {
- return 0;
- }
-
- public void setOptional( boolean b )
- {
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.versioning.VersionRange;
-
-/**
- * @noinspection ReturnOfNull
- */
-public class MockArtifactFactory
- implements ArtifactFactory
-{
- public Artifact createArtifact( String s, String s1, String s2, String s3, String s4 )
- {
- return null;
- }
-
- public Artifact createArtifactWithClassifier( String s, String s1, String s2, String s3, String s4 )
- {
- return null;
- }
-
- public Artifact createDependencyArtifact( String s, String s1, VersionRange versionRange, String s2, String s3,
- String s4 )
- {
- return null;
- }
-
- public Artifact createDependencyArtifact( String s, String s1, VersionRange versionRange, String s2, String s3,
- String s4, String s5 )
- {
- return null;
- }
-
- public Artifact createDependencyArtifact( String s, String s1, VersionRange versionRange, String s2, String s3,
- String s4, String s5, boolean b )
- {
- return null;
- }
-
- public Artifact createBuildArtifact( String s, String s1, String s2, String s3 )
- {
- return null;
- }
-
- public Artifact createProjectArtifact( String s, String s1, String s2 )
- {
- return null;
- }
-
- public Artifact createParentArtifact( String s, String s1, String s2 )
- {
- return null;
- }
-
- public Artifact createPluginArtifact( String s, String s1, VersionRange versionRange )
- {
- return null;
- }
-
- public Artifact createProjectArtifact( String s, String s1, String s2, String s3 )
- {
- return null;
- }
-
- public Artifact createExtensionArtifact( String s, String s1, VersionRange versionRange )
- {
- return null;
- }
-
- public Artifact createDependencyArtifact( String string, String string1, VersionRange versionRange, String string2,
- String string3, String string4, boolean b )
- {
- return null;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- *
- */
-public class MockArtifactReportProcessor
- implements ArtifactReportProcessor
-{
- private List reportConditions;
-
- private Iterator iterator;
-
- public MockArtifactReportProcessor()
- {
- reportConditions = new ArrayList();
- }
-
- public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
- ArtifactRepository repository )
- {
- if ( iterator == null || !iterator.hasNext() ) // not initialized or reached end of the list. start again
- {
- iterator = reportConditions.iterator();
- }
- if ( !reportConditions.isEmpty() )
- {
- while ( iterator.hasNext() )
- {
- ReportCondition reportCondition = (ReportCondition) iterator.next();
- int i = reportCondition.getResult();
- if ( i == ReportCondition.SUCCESS )
- {
- reporter.addSuccess( reportCondition.getArtifact() );
- }
- else if ( i == ReportCondition.WARNING )
- {
- reporter.addWarning( reportCondition.getArtifact(), reportCondition.getReason() );
- }
- else if ( i == ReportCondition.FAILURE )
- {
- reporter.addFailure( reportCondition.getArtifact(), reportCondition.getReason() );
- }
- }
- }
- }
-
- public void addReturnValue( int result, Artifact artifact, String reason )
- {
- reportConditions.add( new ReportCondition( result, artifact, reason ) );
- }
-
- public void clearList()
- {
- reportConditions.clear();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Mock implementation of the artifact reporter.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @version $Id$
- */
-public class MockArtifactReporter
- implements ArtifactReporter
-{
- private List artifactFailures = new ArrayList();
-
- private List artifactSuccesses = new ArrayList();
-
- private List artifactWarnings = new ArrayList();
-
- private List metadataFailures = new ArrayList();
-
- private List metadataSuccesses = new ArrayList();
-
- private List metadataWarnings = new ArrayList();
-
- public void addFailure( Artifact artifact, String reason )
- {
- artifactFailures.add( new ArtifactResult( artifact, reason ) );
- }
-
- public void addSuccess( Artifact artifact )
- {
- artifactSuccesses.add( new ArtifactResult( artifact ) );
- }
-
- public void addWarning( Artifact artifact, String message )
- {
- artifactWarnings.add( new ArtifactResult( artifact, message ) );
- }
-
- public void addFailure( RepositoryMetadata metadata, String reason )
- {
- metadataFailures.add( new RepositoryMetadataResult( metadata, reason ) );
- }
-
- public void addSuccess( RepositoryMetadata metadata )
- {
- metadataSuccesses.add( new RepositoryMetadataResult( metadata ) );
- }
-
- public void addWarning( RepositoryMetadata metadata, String message )
- {
- metadataWarnings.add( new RepositoryMetadataResult( metadata, message ) );
- }
-
- public Iterator getArtifactFailureIterator()
- {
- return artifactFailures.iterator();
- }
-
- public Iterator getArtifactSuccessIterator()
- {
- return artifactSuccesses.iterator();
- }
-
- public Iterator getArtifactWarningIterator()
- {
- return artifactWarnings.iterator();
- }
-
- public Iterator getRepositoryMetadataFailureIterator()
- {
- return metadataFailures.iterator();
- }
-
- public Iterator getRepositoryMetadataSuccessIterator()
- {
- return metadataSuccesses.iterator();
- }
-
- public Iterator getRepositoryMetadataWarningIterator()
- {
- return metadataWarnings.iterator();
- }
-
- public int getFailures()
- {
- return artifactFailures.size();
- }
-
- public int getSuccesses()
- {
- return artifactSuccesses.size();
- }
-
- public int getWarnings()
- {
- return artifactWarnings.size();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- *
- */
-public class MockRepositoryQueryLayer
- implements RepositoryQueryLayer
-{
- private List queryConditions;
-
- private Iterator iterator;
-
- public MockRepositoryQueryLayer()
- {
- queryConditions = new ArrayList();
- }
-
- public boolean containsArtifact( Artifact artifact )
- {
- if ( iterator == null || !iterator.hasNext() ) // not initialized or reached end of the list. start again
- {
- iterator = queryConditions.iterator();
- }
- boolean b;
- if ( queryConditions.isEmpty() )
- {
- b = false;
- }
- else
- {
- b = ( (Boolean) iterator.next() ).booleanValue();
- }
- return b;
- }
-
- public void addReturnValue( boolean queryCondition )
- {
- queryConditions.add( Boolean.valueOf( queryCondition ) );
- }
-
- public void clearList()
- {
- queryConditions.clear();
- }
-
- public boolean containsArtifact( Artifact artifact, Snapshot snapshot )
- {
- return containsArtifact( artifact );
- }
-
- public List getVersions( Artifact artifact )
- {
- return Collections.EMPTY_LIST;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.reporting;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-
-/**
- *
- */
-public class ReportCondition
-{
- public static final int SUCCESS = 0;
-
- public static final int FAILURE = -1;
-
- public static final int WARNING = 1;
-
- private int result;
-
- private Artifact artifact;
-
- private String reason;
-
- public ReportCondition( int result, Artifact artifact, String reason )
- {
- this.result = result;
- this.artifact = artifact;
- this.reason = reason;
- }
-
- public int getResult()
- {
- return result;
- }
-
- public void setResult( int result )
- {
- this.result = result;
- }
-
- public Artifact getArtifact()
- {
- return artifact;
- }
-
- public void setArtifact( Artifact artifact )
- {
- this.artifact = artifact;
- }
-
- public String getReason()
- {
- return reason;
- }
-
- public void setReason( String reason )
- {
- this.reason = reason;
- }
-}
--- /dev/null
+<component-set>
+ <components>
+ <component>
+ <role>org.apache.maven.archiva.reporting.ArtifactReportProcessor</role>
+ <role-hint>duplicate</role-hint>
+ <implementation>org.apache.maven.archiva.reporting.DuplicateArtifactFileReportProcessor</implementation>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.digest.Digester</role>
+ <role-hint>md5</role-hint>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory</role>
+ </requirement>
+ </requirements>
+ <configuration>
+ <indexDirectory>${basedir}/target/indexDirectory</indexDirectory>
+ </configuration>
+ </component>
+ </components>
+</component-set>
\ No newline at end of file
+++ /dev/null
-<component-set>
- <components>
- <component>
- <role>org.apache.maven.repository.reporting.ArtifactReportProcessor</role>
- <role-hint>duplicate</role-hint>
- <implementation>org.apache.maven.repository.reporting.DuplicateArtifactFileReportProcessor</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.repository.digest.Digester</role>
- <role-hint>md5</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory</role>
- </requirement>
- </requirements>
- <configuration>
- <indexDirectory>${basedir}/target/indexDirectory</indexDirectory>
- </configuration>
- </component>
- </components>
-</component-set>
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.util.IOUtil;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * Create a digest for a file.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public abstract class AbstractDigester
+ implements Digester
+{
+ private final StreamingDigester streamingDigester;
+
+ protected AbstractDigester( StreamingDigester streamingDigester )
+ throws NoSuchAlgorithmException
+ {
+ this.streamingDigester = streamingDigester;
+ }
+
+ public String getAlgorithm()
+ {
+ return streamingDigester.getAlgorithm();
+ }
+
+ public String calc( File file )
+ throws DigesterException
+ {
+ FileInputStream fis = null;
+ try
+ {
+ fis = new FileInputStream( file );
+ streamingDigester.reset();
+ streamingDigester.update( fis );
+ return streamingDigester.calc();
+ }
+ catch ( IOException e )
+ {
+ throw new DigesterException( "Unable to calculate the " + streamingDigester.getAlgorithm() +
+ " hashcode for " + file.getAbsolutePath() + ": " + e.getMessage(), e );
+ }
+ finally
+ {
+ IOUtil.close( fis );
+ }
+ }
+
+ public void verify( File file, String checksum )
+ throws DigesterException
+ {
+ String trimmedChecksum =
+ DigestUtils.cleanChecksum( checksum, streamingDigester.getAlgorithm(), file.getName() );
+
+ //Create checksum for jar file
+ String sum = calc( file );
+ if ( !StringUtils.equalsIgnoreCase( trimmedChecksum, sum ) )
+ {
+ throw new DigesterException( "Checksum failed" );
+ }
+ }
+
+ public String toString()
+ {
+ return "[Digester:" + streamingDigester.getAlgorithm() + "]";
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * Gradually create a digest for a stream.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public abstract class AbstractStreamingDigester
+ implements StreamingDigester
+{
+ // The underlying JDK digest; shared with subclasses.
+ protected final MessageDigest md;
+
+ // Upper-case hex alphabet used to render the digest bytes.
+ private static final char[] HEX_CHARS = "0123456789ABCDEF".toCharArray();
+
+ // Masks for the high and low nibble of each digest byte.
+ private static final int HI_MASK = 0xF0;
+
+ private static final int LO_MASK = 0x0F;
+
+ // Read size used when consuming an input stream (32 KB).
+ private static final int BUFFER_SIZE = 32768;
+
+ protected AbstractStreamingDigester( String algorithm )
+ throws NoSuchAlgorithmException
+ {
+ md = MessageDigest.getInstance( algorithm );
+ }
+
+ public String getAlgorithm()
+ {
+ return md.getAlgorithm();
+ }
+
+ public String calc()
+ throws DigesterException
+ {
+ return calc( this.md );
+ }
+
+ public void reset()
+ throws DigesterException
+ {
+ md.reset();
+ }
+
+ public void update( InputStream is )
+ throws DigesterException
+ {
+ update( is, md );
+ }
+
+ /**
+ * Finish the digest and render it as an upper-case hex string.
+ * Note: MessageDigest.digest() also resets the underlying digest.
+ */
+ protected static String calc( MessageDigest md )
+ {
+ byte[] digest = md.digest();
+
+ // Two hex characters per digest byte: high nibble first, then low nibble.
+ char[] hash = new char[digest.length * 2];
+ for ( int i = 0; i < digest.length; i++ )
+ {
+ hash[i * 2] = HEX_CHARS[( digest[i] & HI_MASK ) >> 4];
+ hash[i * 2 + 1] = HEX_CHARS[( digest[i] & LO_MASK )];
+ }
+ return new String( hash );
+ }
+
+ /**
+ * Feed the remainder of the stream into the digest in BUFFER_SIZE chunks.
+ * The stream is read to EOF but is NOT closed here; the caller owns it.
+ */
+ protected static void update( InputStream is, MessageDigest digest )
+ throws DigesterException
+ {
+ try
+ {
+ byte[] buffer = new byte[BUFFER_SIZE];
+ int size = is.read( buffer, 0, BUFFER_SIZE );
+ while ( size >= 0 )
+ {
+ digest.update( buffer, 0, size );
+ size = is.read( buffer, 0, BUFFER_SIZE );
+ }
+ }
+ catch ( IOException e )
+ {
+ throw new DigesterException( "Unable to update " + digest.getAlgorithm() + " hash: " + e.getMessage(), e );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Parse files from checksum file formats.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class DigestUtils
+{
+ private DigestUtils()
+ {
+ // don't create this class
+ }
+
+ /**
+ * Extract the bare hex digest from a checksum string that may be in one of
+ * several common checksum-file formats.
+ *
+ * @param checksum the raw checksum text (bare hex, openssl/FreeBSD format,
+ * or GNU md5sum/sha1sum format)
+ * @param algorithm the algorithm name used to match the openssl-style prefix
+ * (any "-" is stripped, e.g. "SHA-1" matches "SHA1(...)")
+ * @param path the expected file name; used to reject checksums recorded
+ * for a different file
+ * @return the trimmed hex digest, or the trimmed input unchanged when it
+ * matches neither format
+ * @throws DigesterException if the embedded filename does not match the path
+ */
+ public static String cleanChecksum( String checksum, String algorithm, String path )
+ throws DigesterException
+ {
+ // Fold newlines into spaces so trailing "\n" from checksum files trims away.
+ String trimmedChecksum = checksum.replace( '\n', ' ' ).trim();
+
+ // Free-BSD / openssl
+ // e.g. "MD5 (file) = <hex>" or "SHA1(file)= <hex>"
+ String regex = algorithm.replaceAll( "-", "" ) + "\\s*\\((.*?)\\)\\s*=\\s*([a-fA-F0-9]+)";
+ Matcher m = Pattern.compile( regex ).matcher( trimmedChecksum );
+ if ( m.matches() )
+ {
+ String filename = m.group( 1 );
+ // endsWith: the recorded name may be a suffix (bare name) of a longer path.
+ if ( !path.endsWith( filename ) )
+ {
+ throw new DigesterException( "Supplied checksum does not match checksum pattern" );
+ }
+ trimmedChecksum = m.group( 2 );
+ }
+ else
+ {
+ // GNU tools
+ // e.g. "<hex>  file" (text mode) or "<hex> *file" (binary mode marker).
+ m = Pattern.compile( "([a-fA-F0-9]+)\\s\\*?(.+)" ).matcher( trimmedChecksum );
+ if ( m.matches() )
+ {
+ String filename = m.group( 2 );
+ if ( !path.endsWith( filename ) )
+ {
+ throw new DigesterException( "Supplied checksum does not match checksum pattern" );
+ }
+ trimmedChecksum = m.group( 1 );
+ }
+ }
+ return trimmedChecksum;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+
+/**
+ * Create a digest for a file.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface Digester
+{
+ // Plexus component role key for this interface.
+ String ROLE = Digester.class.getName();
+
+ /**
+ * Get the algorithm used for the checksum.
+ *
+ * @return the algorithm
+ */
+ String getAlgorithm();
+
+ /**
+ * Calculate a checksum for a file.
+ *
+ * @param file the file to calculate the checksum for
+ * @return the current checksum.
+ * @throws DigesterException if there was a problem computing the hashcode.
+ */
+ String calc( File file )
+ throws DigesterException;
+
+ /**
+ * Verify that a checksum is correct.
+ *
+ * @param file the file to compute the checksum for
+ * @param checksum the checksum to compare to; may be bare hex or in an
+ * openssl/FreeBSD/GNU checksum-file format
+ * @throws DigesterException if there was a problem computing the hashcode.
+ */
+ void verify( File file, String checksum )
+ throws DigesterException;
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @author Edwin Punzalan
+ */
+public class DigesterException
+ extends Exception
+{
+ /**
+ * @param message description of the digest or verification failure
+ */
+ public DigesterException( String message )
+ {
+ super( message );
+ }
+
+ /**
+ * @param message description of the digest or verification failure
+ * @param cause the underlying error (typically an IOException)
+ */
+ public DigesterException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * Digester that does MD5 Message Digesting Only.
+ *
+ * @plexus.component role="org.apache.maven.archiva.digest.Digester" role-hint="md5"
+ */
+public class Md5Digester
+ extends AbstractDigester
+{
+ /**
+ * @throws NoSuchAlgorithmException if the JVM provides no MD5 MessageDigest
+ */
+ public Md5Digester()
+ throws NoSuchAlgorithmException
+ {
+ super( new StreamingMd5Digester() );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * Digester that does SHA1 Message Digesting Only.
+ *
+ * @plexus.component role="org.apache.maven.archiva.digest.Digester" role-hint="sha1"
+ */
+public class Sha1Digester
+ extends AbstractDigester
+{
+ /**
+ * @throws NoSuchAlgorithmException if the JVM provides no SHA-1 MessageDigest
+ */
+ public Sha1Digester()
+ throws NoSuchAlgorithmException
+ {
+ super( new StreamingSha1Digester() );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.InputStream;
+
+/**
+ * Gradually create a digest for a stream.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public interface StreamingDigester
+{
+ // Plexus component role key for this interface.
+ String ROLE = StreamingDigester.class.getName();
+
+ /**
+ * Get the algorithm used for the checksum.
+ *
+ * @return the algorithm
+ */
+ String getAlgorithm();
+
+ /**
+ * Reset the hashcode calculation algorithm.
+ * Only useful when performing incremental hashcodes based on repeated use of {@link #update(InputStream)}
+ *
+ * @throws DigesterException if there was a problem with the internal message digest
+ */
+ void reset()
+ throws DigesterException;
+
+ /**
+ * Calculate the current checksum.
+ *
+ * @return the current checksum.
+ * @throws DigesterException if there was a problem computing the hashcode.
+ */
+ String calc()
+ throws DigesterException;
+
+ /**
+ * Update the checksum with the content of the input stream.
+ * The stream is read to EOF; the caller remains responsible for closing it
+ * (see AbstractStreamingDigester).
+ *
+ * @param is the input stream
+ * @throws DigesterException if there was a problem computing the hashcode.
+ */
+ void update( InputStream is )
+ throws DigesterException;
+
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * An MD5 implementation of the streaming digester.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component role="org.apache.maven.archiva.digest.StreamingDigester" role-hint="md5"
+ */
+public class StreamingMd5Digester
+ extends AbstractStreamingDigester
+{
+ /**
+ * @throws NoSuchAlgorithmException if the JVM provides no MD5 MessageDigest
+ */
+ public StreamingMd5Digester()
+ throws NoSuchAlgorithmException
+ {
+ super( "MD5" );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * An SHA-1 implementation of the streaming digester.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component role="org.apache.maven.archiva.digest.StreamingDigester" role-hint="sha1"
+ */
+public class StreamingSha1Digester
+ extends AbstractStreamingDigester
+{
+ /**
+ * @throws NoSuchAlgorithmException if the JVM provides no SHA-1 MessageDigest
+ */
+ public StreamingSha1Digester()
+ throws NoSuchAlgorithmException
+ {
+ super( "SHA-1" );
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.util.IOUtil;
-import org.codehaus.plexus.util.StringUtils;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-
-/**
- * Create a digest for a file.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractDigester
- implements Digester
-{
- private final StreamingDigester streamingDigester;
-
- protected AbstractDigester( StreamingDigester streamingDigester )
- throws NoSuchAlgorithmException
- {
- this.streamingDigester = streamingDigester;
- }
-
- public String getAlgorithm()
- {
- return streamingDigester.getAlgorithm();
- }
-
- public String calc( File file )
- throws DigesterException
- {
- FileInputStream fis = null;
- try
- {
- fis = new FileInputStream( file );
- streamingDigester.reset();
- streamingDigester.update( fis );
- return streamingDigester.calc();
- }
- catch ( IOException e )
- {
- throw new DigesterException( "Unable to calculate the " + streamingDigester.getAlgorithm() +
- " hashcode for " + file.getAbsolutePath() + ": " + e.getMessage(), e );
- }
- finally
- {
- IOUtil.close( fis );
- }
- }
-
- public void verify( File file, String checksum )
- throws DigesterException
- {
- String trimmedChecksum =
- DigestUtils.cleanChecksum( checksum, streamingDigester.getAlgorithm(), file.getName() );
-
- //Create checksum for jar file
- String sum = calc( file );
- if ( !StringUtils.equalsIgnoreCase( trimmedChecksum, sum ) )
- {
- throw new DigesterException( "Checksum failed" );
- }
- }
-
- public String toString()
- {
- return "[Digester:" + streamingDigester.getAlgorithm() + "]";
- }
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-
-/**
- * Gradually create a digest for a stream.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractStreamingDigester
- implements StreamingDigester
-{
- protected final MessageDigest md;
-
- private static final char[] HEX_CHARS = "0123456789ABCDEF".toCharArray();
-
- private static final int HI_MASK = 0xF0;
-
- private static final int LO_MASK = 0x0F;
-
- private static final int BUFFER_SIZE = 32768;
-
- protected AbstractStreamingDigester( String algorithm )
- throws NoSuchAlgorithmException
- {
- md = MessageDigest.getInstance( algorithm );
- }
-
- public String getAlgorithm()
- {
- return md.getAlgorithm();
- }
-
- public String calc()
- throws DigesterException
- {
- return calc( this.md );
- }
-
- public void reset()
- throws DigesterException
- {
- md.reset();
- }
-
- public void update( InputStream is )
- throws DigesterException
- {
- update( is, md );
- }
-
- protected static String calc( MessageDigest md )
- {
- byte[] digest = md.digest();
-
- char[] hash = new char[digest.length * 2];
- for ( int i = 0; i < digest.length; i++ )
- {
- hash[i * 2] = HEX_CHARS[( digest[i] & HI_MASK ) >> 4];
- hash[i * 2 + 1] = HEX_CHARS[( digest[i] & LO_MASK )];
- }
- return new String( hash );
- }
-
- protected static void update( InputStream is, MessageDigest digest )
- throws DigesterException
- {
- try
- {
- byte[] buffer = new byte[BUFFER_SIZE];
- int size = is.read( buffer, 0, BUFFER_SIZE );
- while ( size >= 0 )
- {
- digest.update( buffer, 0, size );
- size = is.read( buffer, 0, BUFFER_SIZE );
- }
- }
- catch ( IOException e )
- {
- throw new DigesterException( "Unable to update " + digest.getAlgorithm() + " hash: " + e.getMessage(), e );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Parse files from checksum file formats.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class DigestUtils
-{
- private DigestUtils()
- {
- // don't create this class
- }
-
- public static String cleanChecksum( String checksum, String algorithm, String path )
- throws DigesterException
- {
- String trimmedChecksum = checksum.replace( '\n', ' ' ).trim();
-
- // Free-BSD / openssl
- String regex = algorithm.replaceAll( "-", "" ) + "\\s*\\((.*?)\\)\\s*=\\s*([a-fA-F0-9]+)";
- Matcher m = Pattern.compile( regex ).matcher( trimmedChecksum );
- if ( m.matches() )
- {
- String filename = m.group( 1 );
- if ( !path.endsWith( filename ) )
- {
- throw new DigesterException( "Supplied checksum does not match checksum pattern" );
- }
- trimmedChecksum = m.group( 2 );
- }
- else
- {
- // GNU tools
- m = Pattern.compile( "([a-fA-F0-9]+)\\s\\*?(.+)" ).matcher( trimmedChecksum );
- if ( m.matches() )
- {
- String filename = m.group( 2 );
- if ( !path.endsWith( filename ) )
- {
- throw new DigesterException( "Supplied checksum does not match checksum pattern" );
- }
- trimmedChecksum = m.group( 1 );
- }
- }
- return trimmedChecksum;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.File;
-import java.io.InputStream;
-
-/**
- * Create a digest for a file.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface Digester
-{
- String ROLE = Digester.class.getName();
-
- /**
- * Get the algorithm used for the checksum.
- *
- * @return the algorithm
- */
- String getAlgorithm();
-
- /**
- * Calculate a checksum for a file.
- *
- * @param file the file to calculate the checksum for
- * @return the current checksum.
- * @throws DigesterException if there was a problem computing the hashcode.
- */
- String calc( File file )
- throws DigesterException;
-
- /**
- * Verify that a checksum is correct.
- *
- * @param file the file to compute the checksum for
- * @param checksum the checksum to compare to
- * @throws DigesterException if there was a problem computing the hashcode.
- */
- void verify( File file, String checksum )
- throws DigesterException;
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @author Edwin Punzalan
- */
-public class DigesterException
- extends Exception
-{
- public DigesterException( String message )
- {
- super( message );
- }
-
- public DigesterException( String message, Throwable cause )
- {
- super( message, cause );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.security.NoSuchAlgorithmException;
-
-/**
- * Digester that does MD5 Message Digesting Only.
- *
- * @plexus.component role="org.apache.maven.repository.digest.Digester" role-hint="md5"
- */
-public class Md5Digester
- extends AbstractDigester
-{
- public Md5Digester()
- throws NoSuchAlgorithmException
- {
- super( new StreamingMd5Digester() );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.security.NoSuchAlgorithmException;
-
-/**
- * Digester that does SHA1 Message Digesting Only.
- *
- * @plexus.component role="org.apache.maven.repository.digest.Digester" role-hint="sha1"
- */
-public class Sha1Digester
- extends AbstractDigester
-{
- public Sha1Digester()
- throws NoSuchAlgorithmException
- {
- super( new StreamingSha1Digester() );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.InputStream;
-
-/**
- * Gradually create a digest for a stream.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface StreamingDigester
-{
- String ROLE = StreamingDigester.class.getName();
-
- /**
- * Get the algorithm used for the checksum.
- *
- * @return the algorithm
- */
- String getAlgorithm();
-
- /**
- * Reset the hashcode calculation algorithm.
- * Only useful when performing incremental hashcodes based on repeated use of {@link #update(InputStream)}
- *
- * @throws DigesterException if there was a problem with the internal message digest
- */
- void reset()
- throws DigesterException;
-
- /**
- * Calculate the current checksum.
- *
- * @return the current checksum.
- * @throws DigesterException if there was a problem computing the hashcode.
- */
- String calc()
- throws DigesterException;
-
- /**
- * Update the checksum with the content of the input stream.
- *
- * @param is the input stream
- * @throws DigesterException if there was a problem computing the hashcode.
- */
- void update( InputStream is )
- throws DigesterException;
-
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.security.NoSuchAlgorithmException;
-
-/**
- * An MD5 implementation of the streaming digester.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.repository.digest.StreamingDigester" role-hint="md5"
- */
-public class StreamingMd5Digester
- extends AbstractStreamingDigester
-{
- public StreamingMd5Digester()
- throws NoSuchAlgorithmException
- {
- super( "MD5" );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.security.NoSuchAlgorithmException;
-
-/**
- * An SHA-1 implementation of the streaming digester.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.repository.digest.StreamingDigester" role-hint="sha1"
- */
-public class StreamingSha1Digester
- extends AbstractStreamingDigester
-{
- public StreamingSha1Digester()
- throws NoSuchAlgorithmException
- {
- super( "SHA-1" );
- }
-}
--- /dev/null
+package org.apache.maven.archiva.digest;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.codehaus.plexus.PlexusTestCase;
+
+import java.io.File;
+import java.io.IOException;
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * Test the digester.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class DigesterTest
+ extends PlexusTestCase
+{
+ private static final String MD5 = "adbc688ce77fa2aece4bb72cad9f98ba";
+
+ private static final String SHA1 = "2a7b459938e12a2dc35d1bf6cff35e9c2b592fa9";
+
+ private static final String WRONG_SHA1 = "4d8703779816556cdb8be7f6bb5c954f4b5730e2";
+
+ private Digester sha1Digest;
+
+ private Digester md5Digest;
+
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ sha1Digest = (Digester) lookup( Digester.ROLE, "sha1" );
+ md5Digest = (Digester) lookup( Digester.ROLE, "md5" );
+ }
+
+ public void testAlgorithm()
+ {
+ assertEquals( "SHA-1", sha1Digest.getAlgorithm() );
+ assertEquals( "MD5", md5Digest.getAlgorithm() );
+ }
+
+ public void testBareDigestFormat()
+ throws DigesterException, IOException
+ {
+ File file = new File( getClass().getResource( "/test-file.txt" ).getPath() );
+
+ try
+ {
+ md5Digest.verify( file, MD5 );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "Bare format MD5 must not throw exception" );
+ }
+
+ try
+ {
+ sha1Digest.verify( file, SHA1 );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "Bare format SHA1 must not throw exception" );
+ }
+
+ try
+ {
+ sha1Digest.verify( file, WRONG_SHA1 );
+ fail( "wrong checksum must throw an exception" );
+ }
+ catch ( DigesterException e )
+ {
+ //expected
+ }
+ }
+
+ // Verify the OpenSSL ("ALG(file)= hash") and FreeBSD ("ALG (file) = hash")
+ // checksum layouts are accepted, and that a wrong filename or wrong hash
+ // in those layouts is rejected.
+ public void testOpensslDigestFormat()
+ throws IOException, DigesterException
+ {
+ File file = new File( getClass().getResource( "/test-file.txt" ).getPath() );
+
+ try
+ {
+ md5Digest.verify( file, "MD5(test-file.txt)= " + MD5 );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "OpenSSL MD5 format must not cause exception" );
+ }
+
+ try
+ {
+ md5Digest.verify( file, "MD5 (test-file.txt) = " + MD5 );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "FreeBSD MD5 format must not cause exception" );
+ }
+
+ try
+ {
+ sha1Digest.verify( file, "SHA1(test-file.txt)= " + SHA1 );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "OpenSSL SHA1 format must not cause exception" );
+ }
+
+ try
+ {
+ sha1Digest.verify( file, "SHA1 (test-file.txt) = " + SHA1 );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "FreeBSD SHA1 format must not cause exception" );
+ }
+
+ try
+ {
+ sha1Digest.verify( file, "SHA1 (FOO) = " + SHA1 );
+ fail( "Wrong filename should cause an exception" );
+ }
+ catch ( DigesterException e )
+ {
+ //expected
+ }
+
+ try
+ {
+ sha1Digest.verify( file, "SHA1 (test-file.txt) = " + WRONG_SHA1 );
+ fail( "Wrong sha1 should cause an exception" );
+ }
+ catch ( DigesterException e )
+ {
+ //expected
+ }
+ }
+
+ // Verify the GNU coreutils layout ("hash *file" binary mode, "hash  file"
+ // text mode) is accepted, and that a wrong filename or wrong hash is rejected.
+ public void testGnuDigestFormat()
+ throws NoSuchAlgorithmException, IOException, DigesterException
+ {
+ File file = new File( getClass().getResource( "/test-file.txt" ).getPath() );
+
+ try
+ {
+ md5Digest.verify( file, MD5 + " *test-file.txt" );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "GNU format MD5 must not cause exception" );
+ }
+
+ try
+ {
+ md5Digest.verify( file, MD5 + " test-file.txt" );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "GNU text format MD5 must not cause exception" );
+ }
+
+ try
+ {
+ sha1Digest.verify( file, SHA1 + " *test-file.txt" );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "GNU format SHA1 must not cause exception" );
+ }
+
+ try
+ {
+ sha1Digest.verify( file, SHA1 + " test-file.txt" );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "GNU text format SHA1 must not cause exception" );
+ }
+
+ try
+ {
+ sha1Digest.verify( file, SHA1 + " FOO" );
+ fail( "Wrong filename cause an exception" );
+ }
+ catch ( DigesterException e )
+ {
+ //expected
+ }
+
+ try
+ {
+ sha1Digest.verify( file, WRONG_SHA1 + " test-file.txt" );
+ fail( "Wrong SHA1 cause an exception" );
+ }
+ catch ( DigesterException e )
+ {
+ //expected
+ }
+ }
+
+ // Trailing whitespace/newline in the checksum file content must be tolerated.
+ public void testUntrimmedContent()
+ throws NoSuchAlgorithmException, IOException
+ {
+ File file = new File( getClass().getResource( "/test-file.txt" ).getPath() );
+ try
+ {
+ sha1Digest.verify( file, SHA1 + " *test-file.txt \n" );
+ }
+ catch ( DigesterException e )
+ {
+ fail( "GNU untrimmed SHA1 must not cause exception" );
+ }
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.digest;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.codehaus.plexus.PlexusTestCase;
-
-import java.io.File;
-import java.io.IOException;
-import java.security.NoSuchAlgorithmException;
-
-/**
- * Test the digester.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class DigesterTest
- extends PlexusTestCase
-{
- private static final String MD5 = "adbc688ce77fa2aece4bb72cad9f98ba";
-
- private static final String SHA1 = "2a7b459938e12a2dc35d1bf6cff35e9c2b592fa9";
-
- private static final String WRONG_SHA1 = "4d8703779816556cdb8be7f6bb5c954f4b5730e2";
-
- private Digester sha1Digest;
-
- private Digester md5Digest;
-
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- sha1Digest = (Digester) lookup( Digester.ROLE, "sha1" );
- md5Digest = (Digester) lookup( Digester.ROLE, "md5" );
- }
-
- public void testAlgorithm()
- {
- assertEquals( "SHA-1", sha1Digest.getAlgorithm() );
- assertEquals( "MD5", md5Digest.getAlgorithm() );
- }
-
- public void testBareDigestFormat()
- throws DigesterException, IOException
- {
- File file = new File( getClass().getResource( "/test-file.txt" ).getPath() );
-
- try
- {
- md5Digest.verify( file, MD5 );
- }
- catch ( DigesterException e )
- {
- fail( "Bare format MD5 must not throw exception" );
- }
-
- try
- {
- sha1Digest.verify( file, SHA1 );
- }
- catch ( DigesterException e )
- {
- fail( "Bare format SHA1 must not throw exception" );
- }
-
- try
- {
- sha1Digest.verify( file, WRONG_SHA1 );
- fail( "wrong checksum must throw an exception" );
- }
- catch ( DigesterException e )
- {
- //expected
- }
- }
-
- public void testOpensslDigestFormat()
- throws IOException, DigesterException
- {
- File file = new File( getClass().getResource( "/test-file.txt" ).getPath() );
-
- try
- {
- md5Digest.verify( file, "MD5(test-file.txt)= " + MD5 );
- }
- catch ( DigesterException e )
- {
- fail( "OpenSSL MD5 format must not cause exception" );
- }
-
- try
- {
- md5Digest.verify( file, "MD5 (test-file.txt) = " + MD5 );
- }
- catch ( DigesterException e )
- {
- fail( "FreeBSD MD5 format must not cause exception" );
- }
-
- try
- {
- sha1Digest.verify( file, "SHA1(test-file.txt)= " + SHA1 );
- }
- catch ( DigesterException e )
- {
- fail( "OpenSSL SHA1 format must not cause exception" );
- }
-
- try
- {
- sha1Digest.verify( file, "SHA1 (test-file.txt) = " + SHA1 );
- }
- catch ( DigesterException e )
- {
- fail( "FreeBSD SHA1 format must not cause exception" );
- }
-
- try
- {
- sha1Digest.verify( file, "SHA1 (FOO) = " + SHA1 );
- fail( "Wrong filename should cause an exception" );
- }
- catch ( DigesterException e )
- {
- //expected
- }
-
- try
- {
- sha1Digest.verify( file, "SHA1 (test-file.txt) = " + WRONG_SHA1 );
- fail( "Wrong sha1 should cause an exception" );
- }
- catch ( DigesterException e )
- {
- //expected
- }
- }
-
- public void testGnuDigestFormat()
- throws NoSuchAlgorithmException, IOException, DigesterException
- {
- File file = new File( getClass().getResource( "/test-file.txt" ).getPath() );
-
- try
- {
- md5Digest.verify( file, MD5 + " *test-file.txt" );
- }
- catch ( DigesterException e )
- {
- fail( "GNU format MD5 must not cause exception" );
- }
-
- try
- {
- md5Digest.verify( file, MD5 + " test-file.txt" );
- }
- catch ( DigesterException e )
- {
- fail( "GNU text format MD5 must not cause exception" );
- }
-
- try
- {
- sha1Digest.verify( file, SHA1 + " *test-file.txt" );
- }
- catch ( DigesterException e )
- {
- fail( "GNU format SHA1 must not cause exception" );
- }
-
- try
- {
- sha1Digest.verify( file, SHA1 + " test-file.txt" );
- }
- catch ( DigesterException e )
- {
- fail( "GNU text format SHA1 must not cause exception" );
- }
-
- try
- {
- sha1Digest.verify( file, SHA1 + " FOO" );
- fail( "Wrong filename cause an exception" );
- }
- catch ( DigesterException e )
- {
- //expected
- }
-
- try
- {
- sha1Digest.verify( file, WRONG_SHA1 + " test-file.txt" );
- fail( "Wrong SHA1 cause an exception" );
- }
- catch ( DigesterException e )
- {
- //expected
- }
- }
-
- public void testUntrimmedContent()
- throws NoSuchAlgorithmException, IOException
- {
- File file = new File( getClass().getResource( "/test-file.txt" ).getPath() );
- try
- {
- sha1Digest.verify( file, SHA1 + " *test-file.txt \n" );
- }
- catch ( DigesterException e )
- {
- fail( "GNU untrimmed SHA1 must not cause exception" );
- }
- }
-}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.xwork.ActionSupport;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.archiva.indexing.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexing.lucene.LuceneQuery;
+import org.apache.maven.archiva.indexing.record.StandardArtifactIndexRecord;
+import org.apache.maven.archiva.indexing.record.StandardIndexRecordFields;
+import org.codehaus.plexus.util.StringUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringTokenizer;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+/**
+ * Browse the repository.
+ *
+ * @todo the tree part probably belongs in a browsing component, and the indexer could optimize how it retrieves the terms rather than querying everything
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="browseAction"
+ */
+public class BrowseAction
+ extends ActionSupport
+{
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryArtifactIndexFactory factory;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfiguredRepositoryFactory repositoryFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ // Collated group names for rendering; populated by browse()/browseGroup().
+ private List groups;
+
+ // Requested group ID, bound from the request by the web framework.
+ private String groupId;
+
+ private static final String GROUP_SEPARATOR = ".";
+
+ // Sorted artifact IDs within the selected group; populated by browseGroup().
+ private List artifactIds;
+
+ // Requested artifact ID, bound from the request by the web framework.
+ private String artifactId;
+
+ // Sorted versions of the selected artifact; populated by browseArtifact().
+ private List versions;
+
+ /**
+ * Show the top level of the group tree built from the repository index.
+ *
+ * @return SUCCESS with {@link #groups} populated, or ERROR if the index does not exist yet
+ */
+ public String browse()
+ throws ConfigurationStoreException, RepositoryIndexException, IOException, RepositoryIndexSearchException
+ {
+ RepositoryArtifactIndex index = getIndex();
+
+ if ( !index.exists() )
+ {
+ addActionError( "The repository is not yet indexed. Please wait, and then try again." );
+ return ERROR;
+ }
+
+ GroupTreeNode rootNode = buildGroupTree( index );
+
+ this.groups = collateGroups( rootNode );
+
+ return SUCCESS;
+ }
+
+ /**
+ * Show the subgroups and artifact IDs underneath the requested {@link #groupId}.
+ *
+ * @return SUCCESS with {@link #groups} and {@link #artifactIds} populated, or ERROR
+ *         if the index is missing, no group ID was given, or the group was not found
+ */
+ public String browseGroup()
+ throws ConfigurationStoreException, RepositoryIndexException, IOException, RepositoryIndexSearchException
+ {
+ RepositoryArtifactIndex index = getIndex();
+
+ if ( !index.exists() )
+ {
+ addActionError( "The repository is not yet indexed. Please wait, and then try again." );
+ return ERROR;
+ }
+
+ GroupTreeNode rootNode = buildGroupTree( index );
+
+ if ( StringUtils.isEmpty( groupId ) )
+ {
+ // TODO: i18n
+ addActionError( "You must specify a group ID to browse" );
+ return ERROR;
+ }
+
+ // Walk the tree one dot-separated segment at a time to locate the requested group.
+ StringTokenizer tok = new StringTokenizer( groupId, GROUP_SEPARATOR );
+ while ( tok.hasMoreTokens() )
+ {
+ String part = tok.nextToken();
+
+ if ( !rootNode.getChildren().containsKey( part ) )
+ {
+ // TODO: i18n
+ addActionError( "The group specified was not found" );
+ return ERROR;
+ }
+ else
+ {
+ rootNode = (GroupTreeNode) rootNode.getChildren().get( part );
+ }
+ }
+
+ this.groups = collateGroups( rootNode );
+
+ // Exact-match query on the group ID; results yield the artifact IDs in this group.
+ List records = index.search(
+ new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) ) ) );
+
+ // Use a set first to de-duplicate across versions/classifiers, then sort for display.
+ Set artifactIds = new HashSet();
+ for ( Iterator i = records.iterator(); i.hasNext(); )
+ {
+ StandardArtifactIndexRecord record = (StandardArtifactIndexRecord) i.next();
+ artifactIds.add( record.getArtifactId() );
+ }
+ this.artifactIds = new ArrayList( artifactIds );
+ Collections.sort( this.artifactIds );
+
+ return SUCCESS;
+ }
+
+ /**
+ * Show the available versions of the requested group/artifact combination.
+ *
+ * @return SUCCESS with {@link #versions} populated, or ERROR if a parameter is
+ *         missing or no matching artifacts exist in the index
+ */
+ public String browseArtifact()
+ throws ConfigurationStoreException, RepositoryIndexException, IOException, RepositoryIndexSearchException
+ {
+ RepositoryArtifactIndex index = getIndex();
+
+ if ( StringUtils.isEmpty( groupId ) )
+ {
+ // TODO: i18n
+ addActionError( "You must specify a group ID to browse" );
+ return ERROR;
+ }
+
+ if ( StringUtils.isEmpty( artifactId ) )
+ {
+ // TODO: i18n
+ addActionError( "You must specify a artifact ID to browse" );
+ return ERROR;
+ }
+
+ // Both the group and artifact ID must match exactly.
+ BooleanQuery query = new BooleanQuery();
+ query.add( new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) ),
+ BooleanClause.Occur.MUST );
+ query.add( new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, artifactId ) ),
+ BooleanClause.Occur.MUST );
+
+ List records = index.search( new LuceneQuery( query ) );
+
+ if ( records.isEmpty() )
+ {
+ // TODO: i18n
+ addActionError( "Could not find any artifacts with the given group and artifact ID" );
+ return ERROR;
+ }
+
+ // De-duplicate versions (multiple records per version are possible), then sort.
+ Set versions = new HashSet();
+ for ( Iterator i = records.iterator(); i.hasNext(); )
+ {
+ StandardArtifactIndexRecord record = (StandardArtifactIndexRecord) i.next();
+ versions.add( record.getVersion() );
+ }
+
+ this.versions = new ArrayList( versions );
+ Collections.sort( this.versions );
+
+ return SUCCESS;
+ }
+
+ /**
+ * Build a tree of {@link GroupTreeNode}s from every group ID present in the index,
+ * splitting each group ID on the "." separator.
+ */
+ private GroupTreeNode buildGroupTree( RepositoryArtifactIndex index )
+ throws IOException, RepositoryIndexSearchException
+ {
+ // TODO: give action message if indexing is in progress
+
+ // TODO: this will be inefficient over a very large number of artifacts, should be cached
+
+ List records = index.search( new LuceneQuery( new MatchAllDocsQuery() ) );
+
+ // TreeSet keeps the group IDs sorted and unique.
+ Set groups = new TreeSet();
+ for ( Iterator i = records.iterator(); i.hasNext(); )
+ {
+ StandardArtifactIndexRecord record = (StandardArtifactIndexRecord) i.next();
+ groups.add( record.getGroupId() );
+ }
+
+ GroupTreeNode rootNode = new GroupTreeNode();
+
+ // build a tree structure
+ for ( Iterator i = groups.iterator(); i.hasNext(); )
+ {
+ String groupId = (String) i.next();
+
+ StringTokenizer tok = new StringTokenizer( groupId, GROUP_SEPARATOR );
+
+ GroupTreeNode node = rootNode;
+
+ while ( tok.hasMoreTokens() )
+ {
+ String part = tok.nextToken();
+
+ if ( !node.getChildren().containsKey( part ) )
+ {
+ GroupTreeNode newNode = new GroupTreeNode( part, node );
+ node.addChild( newNode );
+ node = newNode;
+ }
+ else
+ {
+ node = (GroupTreeNode) node.getChildren().get( part );
+ }
+ }
+ }
+ return rootNode;
+ }
+
+ /**
+ * Flatten each child subtree into a display name, collapsing chains of
+ * single-child nodes (e.g. "org" -> "apache" becomes "org.apache").
+ */
+ private List collateGroups( GroupTreeNode rootNode )
+ {
+ List groups = new ArrayList();
+ for ( Iterator i = rootNode.getChildren().values().iterator(); i.hasNext(); )
+ {
+ GroupTreeNode node = (GroupTreeNode) i.next();
+
+ while ( node.getChildren().size() == 1 )
+ {
+ node = (GroupTreeNode) node.getChildren().values().iterator().next();
+ }
+
+ groups.add( node.getFullName() );
+ }
+ return groups;
+ }
+
+ /**
+ * Create the standard artifact index at the path named by the stored configuration.
+ */
+ private RepositoryArtifactIndex getIndex()
+ throws ConfigurationStoreException, RepositoryIndexException
+ {
+ Configuration configuration = configurationStore.getConfigurationFromStore();
+ File indexPath = new File( configuration.getIndexPath() );
+
+ return factory.createStandardIndex( indexPath );
+ }
+
+ public List getGroups()
+ {
+ return groups;
+ }
+
+ public List getArtifactIds()
+ {
+ return artifactIds;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public String getArtifactId()
+ {
+ return artifactId;
+ }
+
+ public void setArtifactId( String artifactId )
+ {
+ this.artifactId = artifactId;
+ }
+
+ public List getVersions()
+ {
+ return versions;
+ }
+
+ /**
+ * One node in the group-ID tree: a name segment, its dotted full name, and its
+ * children keyed by name segment (TreeMap keeps them sorted).
+ */
+ private static class GroupTreeNode
+ {
+ private final String name;
+
+ private final String fullName;
+
+ private final Map children = new TreeMap();
+
+ // Root node constructor: no name, acts only as a container for top-level groups.
+ GroupTreeNode()
+ {
+ name = null;
+ fullName = null;
+ }
+
+ GroupTreeNode( String name, GroupTreeNode parent )
+ {
+ this.name = name;
+ // Root children have no parent prefix; deeper nodes prepend the parent's full name.
+ this.fullName = parent.fullName != null ? parent.fullName + GROUP_SEPARATOR + name : name;
+ }
+
+ public String getName()
+ {
+ return name;
+ }
+
+ public String getFullName()
+ {
+ return fullName;
+ }
+
+ public Map getChildren()
+ {
+ return children;
+ }
+
+ public void addChild( GroupTreeNode newNode )
+ {
+ children.put( newNode.name, newNode );
+ }
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.xwork.ActionSupport;
+import org.apache.maven.archiva.proxy.ProxyException;
+import org.apache.maven.archiva.proxy.ProxyManager;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+
+/**
+ * Proxy functionality.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="proxyAction"
+ */
+public class ProxyAction
+ extends ActionSupport
+{
+ /**
+ * @plexus.requirement
+ */
+ private ProxyManager proxyManager;
+
+ // Repository-relative path of the requested artifact, bound from the request.
+ private String path;
+
+ // Name of the resolved file, exposed for the download result.
+ private String filename;
+
+ // MIME type reported to the client for the stream.
+ private String contentType;
+
+ private static final String NOT_FOUND = "notFound";
+
+ // Open stream over the resolved file; consumed (and closed) by the result type.
+ // NOTE(review): this action never closes the stream itself — confirm the
+ // configured result closes it in all outcomes.
+ private InputStream artifactStream;
+
+ /**
+ * Resolve {@link #path} through the proxy and expose it as a download stream.
+ *
+ * @return SUCCESS with the stream, filename and content type set, or
+ *         "notFound" when the resource does not exist (remotely or locally)
+ */
+ public String execute()
+ throws ProxyException
+ {
+ try
+ {
+ File file = proxyManager.get( path );
+
+ artifactStream = new FileInputStream( file );
+
+ // TODO: could be better
+ contentType = "application/octet-stream";
+
+ filename = file.getName();
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ // TODO: set message?
+ return NOT_FOUND;
+ }
+ catch ( FileNotFoundException e )
+ {
+ // Proxy reported the file but it vanished before we could open it.
+ // TODO: set message?
+ return NOT_FOUND;
+ }
+
+ return SUCCESS;
+ }
+
+ public String getPath()
+ {
+ return path;
+ }
+
+ public void setPath( String path )
+ {
+ this.path = path;
+ }
+
+ public String getFilename()
+ {
+ return filename;
+ }
+
+ public String getContentType()
+ {
+ return contentType;
+ }
+
+ public InputStream getArtifactStream()
+ {
+ return artifactStream;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.xwork.ActionSupport;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryParser.MultiFieldQueryParser;
+import org.apache.lucene.queryParser.ParseException;
+import org.apache.lucene.search.TermQuery;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndex;
+import org.apache.maven.archiva.indexing.RepositoryArtifactIndexFactory;
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.archiva.indexing.RepositoryIndexSearchException;
+import org.apache.maven.archiva.indexing.lucene.LuceneQuery;
+import org.apache.maven.archiva.indexing.record.StandardIndexRecordFields;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.util.List;
+
+/**
+ * Search all indexed fields by the given criteria.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="searchAction"
+ */
+public class SearchAction
+ extends ActionSupport
+{
+ /**
+ * Query string.
+ */
+ private String q;
+
+ /**
+ * The MD5 to search by.
+ */
+ private String md5;
+
+ /**
+ * Search results.
+ */
+ private List searchResults;
+
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryArtifactIndexFactory factory;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfiguredRepositoryFactory repositoryFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ private static final String NO_RESULTS = "noResults";
+
+ private static final String RESULTS = "results";
+
+ private static final String ARTIFACT = "artifact";
+
+ /**
+ * Run the free-text query {@link #q} against all standard index fields.
+ *
+ * @return SUCCESS with {@link #searchResults} populated, or ERROR if the
+ *         index does not exist yet
+ */
+ public String quickSearch()
+ throws MalformedURLException, RepositoryIndexException, RepositoryIndexSearchException,
+ ConfigurationStoreException, ParseException
+ {
+ // TODO: give action message if indexing is in progress
+
+ // NOTE(review): 'assert' is a no-op unless the JVM runs with -ea; consider
+ // an explicit validation with addActionError instead.
+ assert q != null && q.length() != 0;
+
+ RepositoryArtifactIndex index = getIndex();
+
+ if ( !index.exists() )
+ {
+ addActionError( "The repository is not yet indexed. Please wait, and then try again." );
+ return ERROR;
+ }
+
+ // TODO! this is correct, but ugly
+ MultiFieldQueryParser parser = new MultiFieldQueryParser( new String[]{StandardIndexRecordFields.GROUPID,
+ StandardIndexRecordFields.ARTIFACTID, StandardIndexRecordFields.BASE_VERSION,
+ StandardIndexRecordFields.CLASSIFIER, StandardIndexRecordFields.CLASSES, StandardIndexRecordFields.FILES,
+ StandardIndexRecordFields.TYPE, StandardIndexRecordFields.PROJECT_NAME,
+ StandardIndexRecordFields.PROJECT_DESCRIPTION}, new StandardAnalyzer() );
+ searchResults = index.search( new LuceneQuery( parser.parse( q ) ) );
+
+ return SUCCESS;
+ }
+
+ /**
+ * Find artifacts whose MD5 checksum matches {@link #md5}.
+ *
+ * @return "artifact" for a single match, "results" for several, "noResults"
+ *         for none, or ERROR if the index does not exist yet
+ */
+ public String findArtifact()
+ throws Exception
+ {
+ // TODO: give action message if indexing is in progress
+
+ // NOTE(review): same caveat as quickSearch — assert is disabled without -ea.
+ assert md5 != null && md5.length() != 0;
+
+ RepositoryArtifactIndex index = getIndex();
+
+ if ( !index.exists() )
+ {
+ addActionError( "The repository is not yet indexed. Please wait, and then try again." );
+ return ERROR;
+ }
+
+ // Checksums are stored lower-cased, so normalise the input before matching.
+ searchResults = index.search(
+ new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.MD5, md5.toLowerCase() ) ) ) );
+
+ if ( searchResults.isEmpty() )
+ {
+ return NO_RESULTS;
+ }
+ if ( searchResults.size() == 1 )
+ {
+ return ARTIFACT;
+ }
+ else
+ {
+ return RESULTS;
+ }
+ }
+
+ /**
+ * Create the standard artifact index at the path named by the stored configuration.
+ */
+ private RepositoryArtifactIndex getIndex()
+ throws ConfigurationStoreException, RepositoryIndexException
+ {
+ Configuration configuration = configurationStore.getConfigurationFromStore();
+ File indexPath = new File( configuration.getIndexPath() );
+
+ return factory.createStandardIndex( indexPath );
+ }
+
+ public String doInput()
+ {
+ return INPUT;
+ }
+
+ public String getQ()
+ {
+ return q;
+ }
+
+ public void setQ( String q )
+ {
+ this.q = q;
+ }
+
+ public String getMd5()
+ {
+ return md5;
+ }
+
+ public void setMd5( String md5 )
+ {
+ this.md5 = md5;
+ }
+
+ public List getSearchResults()
+ {
+ return searchResults;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.xwork.ActionSupport;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
+import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.factory.ArtifactFactory;
+import org.apache.maven.artifact.repository.ArtifactRepository;
+import org.apache.maven.model.Model;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.project.MavenProjectBuilder;
+import org.apache.maven.project.ProjectBuildingException;
+import org.codehaus.plexus.util.StringUtils;
+import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Browse the repository.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="showArtifactAction"
+ */
+public class ShowArtifactAction
+ extends ActionSupport
+{
+ /**
+ * @plexus.requirement
+ */
+ private ArtifactFactory artifactFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfiguredRepositoryFactory repositoryFactory;
+
+ /**
+ * @plexus.requirement
+ */
+ private MavenProjectBuilder projectBuilder;
+
+ /**
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ // Coordinates of the artifact to display, bound from the request.
+ private String groupId;
+
+ private String artifactId;
+
+ private String version;
+
+ // The resolved POM model for rendering; set on SUCCESS.
+ private Model model;
+
+ /**
+ * Resolve the POM for the requested groupId/artifactId/version from the
+ * configured repositories and expose its {@link Model}.
+ *
+ * @return SUCCESS with {@link #model} set, or ERROR when any coordinate is missing
+ */
+ public String execute()
+ throws ConfigurationStoreException, IOException, XmlPullParserException, ProjectBuildingException
+ {
+ if ( StringUtils.isEmpty( groupId ) )
+ {
+ // TODO: i18n
+ addActionError( "You must specify a group ID to browse" );
+ return ERROR;
+ }
+
+ if ( StringUtils.isEmpty( artifactId ) )
+ {
+ // TODO: i18n
+ addActionError( "You must specify a artifact ID to browse" );
+ return ERROR;
+ }
+
+ if ( StringUtils.isEmpty( version ) )
+ {
+ // TODO: i18n
+ addActionError( "You must specify a version to browse" );
+ return ERROR;
+ }
+
+ Configuration configuration = configurationStore.getConfigurationFromStore();
+ List repositories = repositoryFactory.createRepositories( configuration );
+
+ Artifact artifact = artifactFactory.createProjectArtifact( groupId, artifactId, version );
+ // TODO: maybe we can decouple the assembly parts of the project builder from the repository handling to get rid of the temp repo
+ ArtifactRepository localRepository = repositoryFactory.createLocalRepository( configuration );
+ MavenProject project = projectBuilder.buildFromRepository( artifact, repositories, localRepository );
+
+ model = project.getModel();
+
+ return SUCCESS;
+ }
+
+ public Model getModel()
+ {
+ return model;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public String getArtifactId()
+ {
+ return artifactId;
+ }
+
+ public void setArtifactId( String artifactId )
+ {
+ this.artifactId = artifactId;
+ }
+
+ public String getVersion()
+ {
+ return version;
+ }
+
+ public void setVersion( String version )
+ {
+ this.version = version;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.xwork.ActionSupport;
+import com.opensymphony.xwork.ModelDriven;
+import com.opensymphony.xwork.Preparable;
+import org.apache.maven.archiva.configuration.AbstractRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationChangeException;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.InvalidConfigurationException;
+
+import java.io.IOException;
+
+/**
+ * Base action for repository configuration actions.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public abstract class AbstractConfigureRepositoryAction
+ extends ActionSupport
+ implements ModelDriven, Preparable
+{
+ /**
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ /**
+ * The repository.
+ */
+ private AbstractRepositoryConfiguration repository;
+
+ /**
+ * The repository ID to lookup when editing a repository.
+ */
+ private String repoId;
+
+ /**
+ * The previously read configuration.
+ */
+ protected Configuration configuration;
+
+ /**
+ * Add the form-bound repository to the configuration and persist it.
+ *
+ * @return SUCCESS on save, or INPUT with a field error when the ID already exists
+ */
+ public String add()
+ throws IOException, ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException
+ {
+ // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
+
+ AbstractRepositoryConfiguration existingRepository = getRepository( repository.getId() );
+ if ( existingRepository != null )
+ {
+ addFieldError( "id", "A repository with that id already exists" );
+ return INPUT;
+ }
+
+ return saveConfiguration();
+ }
+
+ /**
+ * Replace an existing repository entry with the form-bound values and persist.
+ */
+ public String edit()
+ throws IOException, ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException
+ {
+ // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
+
+ // Remove the old entry; saveConfiguration() re-adds the updated one.
+ AbstractRepositoryConfiguration existingRepository = getRepository( repository.getId() );
+ removeRepository( existingRepository );
+
+ return saveConfiguration();
+ }
+
+ // Subclass hook: remove the given repository entry from {@link #configuration}.
+ protected abstract void removeRepository( AbstractRepositoryConfiguration existingRepository );
+
+ // Subclass hook: find a repository entry by ID, or null if absent.
+ protected abstract AbstractRepositoryConfiguration getRepository( String id );
+
+ /**
+ * Add the current repository to the configuration and store it.
+ */
+ private String saveConfiguration()
+ throws IOException, ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException
+ {
+ addRepository();
+
+ configurationStore.storeConfiguration( configuration );
+
+ // TODO: do we need to check if indexing is needed?
+
+ addActionMessage( "Successfully saved configuration" );
+
+ return SUCCESS;
+ }
+
+ // Subclass hook: add {@link #getRepository()} to {@link #configuration}.
+ protected abstract void addRepository()
+ throws IOException;
+
+ public String input()
+ {
+ return INPUT;
+ }
+
+ // ModelDriven: the repository bean receives the form parameters directly.
+ public Object getModel()
+ {
+ return repository;
+ }
+
+ // Subclass hook: create an empty repository configuration of the right subtype.
+ protected abstract AbstractRepositoryConfiguration createRepository();
+
+ /**
+ * Preparable: load the configuration, then resolve the repository bean —
+ * by {@link #repoId} when editing, or a fresh instance when adding.
+ */
+ public void prepare()
+ throws ConfigurationStoreException
+ {
+ configuration = configurationStore.getConfigurationFromStore();
+
+ if ( repository == null )
+ {
+ repository = getRepository( repoId );
+ }
+ if ( repository == null )
+ {
+ repository = createRepository();
+ }
+ }
+
+ public String getRepoId()
+ {
+ return repoId;
+ }
+
+ public void setRepoId( String repoId )
+ {
+ this.repoId = repoId;
+ }
+
+ protected AbstractRepositoryConfiguration getRepository()
+ {
+ return repository;
+ }
+
+ public Configuration getConfiguration()
+ {
+ return configuration;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.AbstractRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationChangeException;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.InvalidConfigurationException;
+import org.codehaus.plexus.xwork.action.PlexusActionSupport;
+
+import java.io.IOException;
+
+/**
+ * Base action for repository removal actions.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public abstract class AbstractDeleteRepositoryAction
+ extends PlexusActionSupport
+{
+ /**
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ /**
+ * The repository ID to lookup when editing a repository.
+ */
+ protected String repoId;
+
+ /**
+ * Which operation to select.
+ */
+ private String operation = "unmodified";
+
+ public String execute()
+ throws ConfigurationStoreException, IOException, InvalidConfigurationException, ConfigurationChangeException
+ {
+ // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
+
+ if ( "delete-entry".equals( operation ) || "delete-contents".equals( operation ) )
+ {
+ Configuration configuration = configurationStore.getConfigurationFromStore();
+
+ AbstractRepositoryConfiguration existingRepository = getRepository( configuration );
+ if ( existingRepository == null )
+ {
+ addActionError( "A repository with that id does not exist" );
+ return ERROR;
+ }
+
+ // TODO: remove from index too!
+
+ removeRepository( configuration, existingRepository );
+
+ configurationStore.storeConfiguration( configuration );
+
+ if ( "delete-contents".equals( operation ) )
+ {
+ removeContents( existingRepository );
+ }
+ }
+
+ return SUCCESS;
+ }
+
+ protected abstract void removeContents( AbstractRepositoryConfiguration existingRepository )
+ throws IOException;
+
+ protected abstract AbstractRepositoryConfiguration getRepository( Configuration configuration );
+
+ protected abstract void removeRepository( Configuration configuration,
+ AbstractRepositoryConfiguration existingRepository );
+
+ public String input()
+ {
+ return INPUT;
+ }
+
+ public String getRepoId()
+ {
+ return repoId;
+ }
+
+ public void setRepoId( String repoId )
+ {
+ this.repoId = repoId;
+ }
+
+ public String getOperation()
+ {
+ return operation;
+ }
+
+ public void setOperation( String operation )
+ {
+ this.operation = operation;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.xwork.ActionSupport;
+import com.opensymphony.xwork.ModelDriven;
+import com.opensymphony.xwork.Preparable;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationChangeException;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.apache.maven.archiva.configuration.ConfigurationStoreException;
+import org.apache.maven.archiva.configuration.InvalidConfigurationException;
+import org.apache.maven.archiva.indexing.RepositoryIndexException;
+import org.apache.maven.archiva.indexing.RepositoryIndexSearchException;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Configures the application.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="configureAction"
+ */
+public class ConfigureAction
+ extends ActionSupport
+ implements ModelDriven, Preparable
+{
+ /**
+ * @plexus.requirement
+ */
+ private ConfigurationStore configurationStore;
+
+ /**
+ * The configuration.
+ */
+ private Configuration configuration;
+
+ public String execute()
+ throws IOException, RepositoryIndexException, RepositoryIndexSearchException, ConfigurationStoreException,
+ InvalidConfigurationException, ConfigurationChangeException
+ {
+ // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
+ // TODO: if this is changed, do we move the index or recreate it?
+
+ // Normalize the path
+ File file = new File( configuration.getIndexPath() );
+ configuration.setIndexPath( file.getCanonicalPath() );
+ if ( !file.exists() )
+ {
+ file.mkdirs();
+ // TODO: error handling when this fails, or is not a directory
+ }
+
+ // Just double checking that our validation routines line up with what is expected in the configuration
+ assert configuration.isValid();
+
+ configurationStore.storeConfiguration( configuration );
+
+ // TODO: if the repository has changed, we need to check if indexing is needed
+
+ addActionMessage( "Successfully saved configuration" );
+
+ return SUCCESS;
+ }
+
+ public String input()
+ {
+ return INPUT;
+ }
+
+ public Object getModel()
+ {
+ return configuration;
+ }
+
+ public void prepare()
+ throws ConfigurationStoreException
+ {
+ configuration = configurationStore.getConfigurationFromStore();
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.AbstractRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.ProxiedRepositoryConfiguration;
+
+import java.io.IOException;
+
+/**
+ * Configures the application repositories.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="configureProxiedRepositoryAction"
+ */
+public class ConfigureProxiedRepositoryAction
+    extends AbstractConfigureRepositoryAction
+{
+    /**
+     * Detach the given entry from the proxied repository list.
+     */
+    protected void removeRepository( AbstractRepositoryConfiguration existingRepository )
+    {
+        configuration.removeProxiedRepository( (ProxiedRepositoryConfiguration) existingRepository );
+    }
+
+    /**
+     * Find a proxied repository entry by id.
+     */
+    protected AbstractRepositoryConfiguration getRepository( String id )
+    {
+        return configuration.getProxiedRepositoryById( id );
+    }
+
+    /**
+     * Attach the repository currently being edited to the proxied repository list.
+     */
+    protected void addRepository()
+        throws IOException
+    {
+        configuration.addProxiedRepository( (ProxiedRepositoryConfiguration) getRepository() );
+    }
+
+    /**
+     * @return a fresh, empty proxied repository entry
+     */
+    protected AbstractRepositoryConfiguration createRepository()
+    {
+        return new ProxiedRepositoryConfiguration();
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.AbstractRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Configures the application repositories.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="configureRepositoryAction"
+ */
+public class ConfigureRepositoryAction
+    extends AbstractConfigureRepositoryAction
+{
+    /**
+     * Detach the given entry from the managed repository list.
+     */
+    protected void removeRepository( AbstractRepositoryConfiguration existingRepository )
+    {
+        configuration.removeRepository( (RepositoryConfiguration) existingRepository );
+    }
+
+    /**
+     * Find a managed repository entry by id.
+     */
+    protected AbstractRepositoryConfiguration getRepository( String id )
+    {
+        return configuration.getRepositoryById( id );
+    }
+
+    /**
+     * Normalize the repository directory, create it when missing, and add the
+     * repository to the configuration.
+     *
+     * @throws IOException if the directory cannot be created, or the configured
+     *         path exists but is not a directory
+     */
+    protected void addRepository()
+        throws IOException
+    {
+        RepositoryConfiguration repository = (RepositoryConfiguration) getRepository();
+
+        // Normalize the path
+        File file = new File( repository.getDirectory() );
+        repository.setDirectory( file.getCanonicalPath() );
+
+        // Fail fast instead of silently ignoring a failed mkdirs() (was a TODO);
+        // the already-declared IOException is the natural contract for this failure.
+        if ( !file.exists() )
+        {
+            if ( !file.mkdirs() )
+            {
+                throw new IOException( "Unable to create repository directory: " + file );
+            }
+        }
+        else if ( !file.isDirectory() )
+        {
+            throw new IOException( "Repository path is not a directory: " + file );
+        }
+
+        configuration.addRepository( repository );
+    }
+
+    /**
+     * @return a fresh managed repository entry, not indexed by default
+     */
+    protected AbstractRepositoryConfiguration createRepository()
+    {
+        RepositoryConfiguration repository = new RepositoryConfiguration();
+        repository.setIndexed( false );
+        return repository;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.AbstractRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.SyncedRepositoryConfiguration;
+
+import java.io.IOException;
+
+/**
+ * Configures the application repositories.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="configureSyncedRepositoryAction"
+ */
+public class ConfigureSyncedRepositoryAction
+    extends AbstractConfigureRepositoryAction
+{
+    /**
+     * Detach the given entry from the synced repository list.
+     */
+    protected void removeRepository( AbstractRepositoryConfiguration existingRepository )
+    {
+        configuration.removeSyncedRepository( (SyncedRepositoryConfiguration) existingRepository );
+    }
+
+    /**
+     * Find a synced repository entry by id.
+     */
+    protected AbstractRepositoryConfiguration getRepository( String id )
+    {
+        return configuration.getSyncedRepositoryById( id );
+    }
+
+    /**
+     * Attach the repository currently being edited to the synced repository list.
+     */
+    protected void addRepository()
+        throws IOException
+    {
+        configuration.addSyncedRepository( (SyncedRepositoryConfiguration) getRepository() );
+    }
+
+    /**
+     * @return a fresh, empty synced repository entry
+     */
+    protected AbstractRepositoryConfiguration createRepository()
+    {
+        return new SyncedRepositoryConfiguration();
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.AbstractRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ProxiedRepositoryConfiguration;
+
+import java.io.IOException;
+
+/**
+ * Deletes a proxied repository from the configuration, optionally removing its contents.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="deleteProxiedRepositoryAction"
+ */
+public class DeleteProxiedRepositoryAction
+    extends AbstractDeleteRepositoryAction
+{
+    /**
+     * Find a proxied repository entry by the requested repository id.
+     */
+    protected AbstractRepositoryConfiguration getRepository( Configuration configuration )
+    {
+        return configuration.getProxiedRepositoryById( repoId );
+    }
+
+    /**
+     * Detach the entry from the proxied repository list.
+     */
+    protected void removeRepository( Configuration configuration, AbstractRepositoryConfiguration existingRepository )
+    {
+        configuration.removeProxiedRepository( (ProxiedRepositoryConfiguration) existingRepository );
+    }
+
+    /**
+     * Remove the proxied repository's contents.
+     * NOTE(review): not implemented yet - the "delete-contents" operation currently
+     * removes only the configuration entry for proxied repositories.
+     */
+    protected void removeContents( AbstractRepositoryConfiguration existingRepository )
+        throws IOException
+    {
+        // TODO!
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.AbstractRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.codehaus.plexus.util.FileUtils;
+
+import java.io.IOException;
+
+/**
+ * Deletes a managed repository from the configuration, optionally removing its contents.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="deleteRepositoryAction"
+ */
+public class DeleteRepositoryAction
+    extends AbstractDeleteRepositoryAction
+{
+    /**
+     * Find a managed repository entry by the requested repository id.
+     */
+    protected AbstractRepositoryConfiguration getRepository( Configuration configuration )
+    {
+        return configuration.getRepositoryById( repoId );
+    }
+
+    /**
+     * Detach the entry from the managed repository list.
+     */
+    protected void removeRepository( Configuration configuration, AbstractRepositoryConfiguration existingRepository )
+    {
+        configuration.removeRepository( (RepositoryConfiguration) existingRepository );
+    }
+
+    /**
+     * Delete the repository's directory tree from disk.
+     */
+    protected void removeContents( AbstractRepositoryConfiguration existingRepository )
+        throws IOException
+    {
+        String directory = ( (RepositoryConfiguration) existingRepository ).getDirectory();
+
+        getLogger().info( "Removing " + directory );
+        FileUtils.deleteDirectory( directory );
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.maven.archiva.configuration.AbstractRepositoryConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.SyncedRepositoryConfiguration;
+
+import java.io.IOException;
+
+/**
+ * Deletes a synced repository from the configuration, optionally removing its contents.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="deleteSyncedRepositoryAction"
+ */
+public class DeleteSyncedRepositoryAction
+    extends AbstractDeleteRepositoryAction
+{
+    /**
+     * Find a synced repository entry by the requested repository id.
+     */
+    protected AbstractRepositoryConfiguration getRepository( Configuration configuration )
+    {
+        return configuration.getSyncedRepositoryById( repoId );
+    }
+
+    /**
+     * Detach the entry from the synced repository list.
+     */
+    protected void removeRepository( Configuration configuration, AbstractRepositoryConfiguration existingRepository )
+    {
+        configuration.removeSyncedRepository( (SyncedRepositoryConfiguration) existingRepository );
+    }
+
+    /**
+     * Remove the synced repository's contents.
+     * NOTE(review): not implemented yet - the "delete-contents" operation currently
+     * removes only the configuration entry for synced repositories.
+     */
+    protected void removeContents( AbstractRepositoryConfiguration existingRepository )
+        throws IOException
+    {
+        // TODO!
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.action.admin;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.xwork.ActionSupport;
+import org.apache.maven.archiva.scheduler.RepositoryTaskScheduler;
+import org.apache.maven.archiva.scheduler.TaskExecutionException;
+
+/**
+ * Triggers an immediate run of the repository indexer.
+ *
+ * @plexus.component role="com.opensymphony.xwork.Action" role-hint="runIndexerAction"
+ */
+public class RunIndexerAction
+    extends ActionSupport
+{
+    /**
+     * @plexus.requirement
+     */
+    private RepositoryTaskScheduler taskScheduler;
+
+    /**
+     * Trigger a run of the repository indexer via the task scheduler.
+     *
+     * @return SUCCESS after the scheduler's runIndexer() call returns (whether that
+     *         call blocks until indexing completes is not visible here - confirm)
+     * @throws TaskExecutionException if the indexer task fails
+     */
+    public String execute()
+        throws TaskExecutionException
+    {
+        taskScheduler.runIndexer();
+
+        return SUCCESS;
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.interceptor;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.xwork.ActionInvocation;
+import com.opensymphony.xwork.interceptor.Interceptor;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ConfigurationStore;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+
+/**
+ * An interceptor that makes the application configuration available
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @todo might be a generally useful thing in plexus-xwork-integration
+ * @plexus.component role="com.opensymphony.xwork.interceptor.Interceptor" role-hint="configurationInterceptor"
+ */
+public class ConfigurationInterceptor
+    extends AbstractLogEnabled
+    implements Interceptor
+{
+    /**
+     * @plexus.requirement
+     */
+    private ConfigurationStore configurationStore;
+
+    /**
+     * Let the action proceed only when the application configuration is complete;
+     * otherwise forward the user to the appropriate configuration page.
+     *
+     * @param actionInvocation the invocation being intercepted
+     * @return the invocation's own result when configuration is valid, or a
+     *         configuration-page result name when setup is still required
+     * @throws Exception propagated from the configuration store or the invocation
+     */
+    public String intercept( ActionInvocation actionInvocation )
+        throws Exception
+    {
+        Configuration configuration = configurationStore.getConfigurationFromStore();
+
+        // guard-clause form: a valid configuration is the common case
+        if ( configuration.isValid() )
+        {
+            return actionInvocation.invoke();
+        }
+
+        if ( configuration.getRepositories().isEmpty() )
+        {
+            getLogger().info( "No repositories were configured - forwarding to repository configuration page" );
+            return "config-repository-needed";
+        }
+
+        getLogger().info( "Configuration is incomplete - forwarding to configuration page" );
+        return "config-needed";
+    }
+
+    public void destroy()
+    {
+        // This space left intentionally blank
+    }
+
+    public void init()
+    {
+        // This space left intentionally blank
+    }
+}
--- /dev/null
+package org.apache.maven.archiva.manager.web.mapper;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import com.opensymphony.webwork.dispatcher.mapper.ActionMapping;
+import com.opensymphony.webwork.dispatcher.mapper.DefaultActionMapper;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Map alternate URLs to specific actions. Used for the repository browser and the proxy.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ */
+public class RepositoryActionMapper
+    extends DefaultActionMapper
+{
+    /** Servlet-path prefix routed to the repository browser actions. */
+    private static final String BROWSE_PREFIX = "/browse/";
+
+    /** Servlet-path prefix routed to the proxy action. */
+    private static final String PROXY_PREFIX = "/proxy/";
+
+    /**
+     * Reverse mapping: turn the browse/proxy action mappings back into their
+     * friendly URLs; any other mapping falls through to the default behaviour.
+     *
+     * NOTE(review): remove() (rather than get()) mutates the mapping's parameter
+     * map - presumably so values already encoded into the path are not appended
+     * again as query parameters; confirm against DefaultActionMapper.
+     */
+    public String getUriFromActionMapping( ActionMapping actionMapping )
+    {
+        Map params = actionMapping.getParams();
+        if ( "browseGroup".equals( actionMapping.getName() ) )
+        {
+            return BROWSE_PREFIX + params.remove( "groupId" );
+        }
+        else if ( "browseArtifact".equals( actionMapping.getName() ) )
+        {
+            return BROWSE_PREFIX + params.remove( "groupId" ) + "/" + params.remove( "artifactId" );
+        }
+        else if ( "showArtifact".equals( actionMapping.getName() ) )
+        {
+            return BROWSE_PREFIX + params.remove( "groupId" ) + "/" + params.remove( "artifactId" ) + "/" +
+                params.remove( "version" );
+        }
+        else if ( "proxy".equals( actionMapping.getName() ) )
+        {
+            return PROXY_PREFIX + params.remove( "path" );
+        }
+
+        return super.getUriFromActionMapping( actionMapping );
+    }
+
+    /**
+     * Forward mapping: decode /browse/... and /proxy/... servlet paths into the
+     * corresponding actions, delegating everything else to the default mapper.
+     */
+    public ActionMapping getMapping( HttpServletRequest httpServletRequest )
+    {
+        String path = httpServletRequest.getServletPath();
+        if ( path.startsWith( BROWSE_PREFIX ) )
+        {
+            path = path.substring( BROWSE_PREFIX.length() );
+            if ( path.length() == 0 )
+            {
+                // bare /browse/ - top-level browse page, no parameters
+                return new ActionMapping( "browse", "/", "", null );
+            }
+            else
+            {
+                // 1 segment = group, 2 = group/artifact, 3 = group/artifact/version
+                String[] parts = path.split( "/" );
+                if ( parts.length == 1 )
+                {
+                    Map params = new HashMap();
+                    params.put( "groupId", parts[0] );
+                    return new ActionMapping( "browseGroup", "/", "", params );
+                }
+                else if ( parts.length == 2 )
+                {
+                    Map params = new HashMap();
+                    params.put( "groupId", parts[0] );
+                    params.put( "artifactId", parts[1] );
+                    return new ActionMapping( "browseArtifact", "/", "", params );
+                }
+                else if ( parts.length == 3 )
+                {
+                    Map params = new HashMap();
+                    params.put( "groupId", parts[0] );
+                    params.put( "artifactId", parts[1] );
+                    params.put( "version", parts[2] );
+                    return new ActionMapping( "showArtifact", "/", "", params );
+                }
+                // NOTE(review): more than three segments falls through to the
+                // default mapper below - confirm that is the intended behaviour
+            }
+        }
+        else if ( path.startsWith( PROXY_PREFIX ) )
+        {
+            // retain the leading /
+            path = path.substring( PROXY_PREFIX.length() - 1 );
+
+            Map params = new HashMap();
+            params.put( "path", path );
+            return new ActionMapping( "proxy", "/", "", params );
+        }
+
+        return super.getMapping( httpServletRequest );
+    }
+}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.xwork.ActionSupport;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.apache.maven.repository.indexing.RepositoryIndexSearchException;
-import org.apache.maven.repository.indexing.lucene.LuceneQuery;
-import org.apache.maven.repository.indexing.record.StandardArtifactIndexRecord;
-import org.apache.maven.repository.indexing.record.StandardIndexRecordFields;
-import org.codehaus.plexus.util.StringUtils;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.StringTokenizer;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-/**
- * Browse the repository.
- *
- * @todo the tree part probably belongs in a browsing component, and the indexer could optimize how it retrieves the terms rather than querying everything
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="browseAction"
- */
-public class BrowseAction
- extends ActionSupport
-{
- /**
- * @plexus.requirement
- */
- private RepositoryArtifactIndexFactory factory;
-
- /**
- * @plexus.requirement
- */
- private ConfiguredRepositoryFactory repositoryFactory;
-
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- private List groups;
-
- private String groupId;
-
- private static final String GROUP_SEPARATOR = ".";
-
- private List artifactIds;
-
- private String artifactId;
-
- private List versions;
-
- public String browse()
- throws ConfigurationStoreException, RepositoryIndexException, IOException, RepositoryIndexSearchException
- {
- RepositoryArtifactIndex index = getIndex();
-
- if ( !index.exists() )
- {
- addActionError( "The repository is not yet indexed. Please wait, and then try again." );
- return ERROR;
- }
-
- GroupTreeNode rootNode = buildGroupTree( index );
-
- this.groups = collateGroups( rootNode );
-
- return SUCCESS;
- }
-
- public String browseGroup()
- throws ConfigurationStoreException, RepositoryIndexException, IOException, RepositoryIndexSearchException
- {
- RepositoryArtifactIndex index = getIndex();
-
- if ( !index.exists() )
- {
- addActionError( "The repository is not yet indexed. Please wait, and then try again." );
- return ERROR;
- }
-
- GroupTreeNode rootNode = buildGroupTree( index );
-
- if ( StringUtils.isEmpty( groupId ) )
- {
- // TODO: i18n
- addActionError( "You must specify a group ID to browse" );
- return ERROR;
- }
-
- StringTokenizer tok = new StringTokenizer( groupId, GROUP_SEPARATOR );
- while ( tok.hasMoreTokens() )
- {
- String part = tok.nextToken();
-
- if ( !rootNode.getChildren().containsKey( part ) )
- {
- // TODO: i18n
- addActionError( "The group specified was not found" );
- return ERROR;
- }
- else
- {
- rootNode = (GroupTreeNode) rootNode.getChildren().get( part );
- }
- }
-
- this.groups = collateGroups( rootNode );
-
- List records = index.search(
- new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) ) ) );
-
- Set artifactIds = new HashSet();
- for ( Iterator i = records.iterator(); i.hasNext(); )
- {
- StandardArtifactIndexRecord record = (StandardArtifactIndexRecord) i.next();
- artifactIds.add( record.getArtifactId() );
- }
- this.artifactIds = new ArrayList( artifactIds );
- Collections.sort( this.artifactIds );
-
- return SUCCESS;
- }
-
- public String browseArtifact()
- throws ConfigurationStoreException, RepositoryIndexException, IOException, RepositoryIndexSearchException
- {
- RepositoryArtifactIndex index = getIndex();
-
- if ( StringUtils.isEmpty( groupId ) )
- {
- // TODO: i18n
- addActionError( "You must specify a group ID to browse" );
- return ERROR;
- }
-
- if ( StringUtils.isEmpty( artifactId ) )
- {
- // TODO: i18n
- addActionError( "You must specify a artifact ID to browse" );
- return ERROR;
- }
-
- BooleanQuery query = new BooleanQuery();
- query.add( new TermQuery( new Term( StandardIndexRecordFields.GROUPID_EXACT, groupId ) ),
- BooleanClause.Occur.MUST );
- query.add( new TermQuery( new Term( StandardIndexRecordFields.ARTIFACTID_EXACT, artifactId ) ),
- BooleanClause.Occur.MUST );
-
- List records = index.search( new LuceneQuery( query ) );
-
- if ( records.isEmpty() )
- {
- // TODO: i18n
- addActionError( "Could not find any artifacts with the given group and artifact ID" );
- return ERROR;
- }
-
- Set versions = new HashSet();
- for ( Iterator i = records.iterator(); i.hasNext(); )
- {
- StandardArtifactIndexRecord record = (StandardArtifactIndexRecord) i.next();
- versions.add( record.getVersion() );
- }
-
- this.versions = new ArrayList( versions );
- Collections.sort( this.versions );
-
- return SUCCESS;
- }
-
- private GroupTreeNode buildGroupTree( RepositoryArtifactIndex index )
- throws IOException, RepositoryIndexSearchException
- {
- // TODO: give action message if indexing is in progress
-
- // TODO: this will be inefficient over a very large number of artifacts, should be cached
-
- List records = index.search( new LuceneQuery( new MatchAllDocsQuery() ) );
-
- Set groups = new TreeSet();
- for ( Iterator i = records.iterator(); i.hasNext(); )
- {
- StandardArtifactIndexRecord record = (StandardArtifactIndexRecord) i.next();
- groups.add( record.getGroupId() );
- }
-
- GroupTreeNode rootNode = new GroupTreeNode();
-
- // build a tree structure
- for ( Iterator i = groups.iterator(); i.hasNext(); )
- {
- String groupId = (String) i.next();
-
- StringTokenizer tok = new StringTokenizer( groupId, GROUP_SEPARATOR );
-
- GroupTreeNode node = rootNode;
-
- while ( tok.hasMoreTokens() )
- {
- String part = tok.nextToken();
-
- if ( !node.getChildren().containsKey( part ) )
- {
- GroupTreeNode newNode = new GroupTreeNode( part, node );
- node.addChild( newNode );
- node = newNode;
- }
- else
- {
- node = (GroupTreeNode) node.getChildren().get( part );
- }
- }
- }
- return rootNode;
- }
-
- private List collateGroups( GroupTreeNode rootNode )
- {
- List groups = new ArrayList();
- for ( Iterator i = rootNode.getChildren().values().iterator(); i.hasNext(); )
- {
- GroupTreeNode node = (GroupTreeNode) i.next();
-
- while ( node.getChildren().size() == 1 )
- {
- node = (GroupTreeNode) node.getChildren().values().iterator().next();
- }
-
- groups.add( node.getFullName() );
- }
- return groups;
- }
-
- private RepositoryArtifactIndex getIndex()
- throws ConfigurationStoreException, RepositoryIndexException
- {
- Configuration configuration = configurationStore.getConfigurationFromStore();
- File indexPath = new File( configuration.getIndexPath() );
-
- return factory.createStandardIndex( indexPath );
- }
-
- public List getGroups()
- {
- return groups;
- }
-
- public List getArtifactIds()
- {
- return artifactIds;
- }
-
- public String getGroupId()
- {
- return groupId;
- }
-
- public void setGroupId( String groupId )
- {
- this.groupId = groupId;
- }
-
- public String getArtifactId()
- {
- return artifactId;
- }
-
- public void setArtifactId( String artifactId )
- {
- this.artifactId = artifactId;
- }
-
- public List getVersions()
- {
- return versions;
- }
-
- private static class GroupTreeNode
- {
- private final String name;
-
- private final String fullName;
-
- private final Map children = new TreeMap();
-
- GroupTreeNode()
- {
- name = null;
- fullName = null;
- }
-
- GroupTreeNode( String name, GroupTreeNode parent )
- {
- this.name = name;
- this.fullName = parent.fullName != null ? parent.fullName + GROUP_SEPARATOR + name : name;
- }
-
- public String getName()
- {
- return name;
- }
-
- public String getFullName()
- {
- return fullName;
- }
-
- public Map getChildren()
- {
- return children;
- }
-
- public void addChild( GroupTreeNode newNode )
- {
- children.put( newNode.name, newNode );
- }
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.xwork.ActionSupport;
-import org.apache.maven.repository.proxy.ProxyException;
-import org.apache.maven.repository.proxy.ProxyManager;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-
-/**
- * Proxy functionality.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="proxyAction"
- */
-public class ProxyAction
- extends ActionSupport
-{
- /**
- * @plexus.requirement
- */
- private ProxyManager proxyManager;
-
- private String path;
-
- private String filename;
-
- private String contentType;
-
- private static final String NOT_FOUND = "notFound";
-
- private InputStream artifactStream;
-
- public String execute()
- throws ProxyException
- {
- try
- {
- File file = proxyManager.get( path );
-
- artifactStream = new FileInputStream( file );
-
- // TODO: could be better
- contentType = "application/octet-stream";
-
- filename = file.getName();
- }
- catch ( ResourceDoesNotExistException e )
- {
- // TODO: set message?
- return NOT_FOUND;
- }
- catch ( FileNotFoundException e )
- {
- // TODO: set message?
- return NOT_FOUND;
- }
-
- return SUCCESS;
- }
-
- public String getPath()
- {
- return path;
- }
-
- public void setPath( String path )
- {
- this.path = path;
- }
-
- public String getFilename()
- {
- return filename;
- }
-
- public String getContentType()
- {
- return contentType;
- }
-
- public InputStream getArtifactStream()
- {
- return artifactStream;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.xwork.ActionSupport;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.queryParser.MultiFieldQueryParser;
-import org.apache.lucene.queryParser.ParseException;
-import org.apache.lucene.search.TermQuery;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndex;
-import org.apache.maven.repository.indexing.RepositoryArtifactIndexFactory;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.apache.maven.repository.indexing.RepositoryIndexSearchException;
-import org.apache.maven.repository.indexing.lucene.LuceneQuery;
-import org.apache.maven.repository.indexing.record.StandardIndexRecordFields;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.util.List;
-
-/**
- * Search all indexed fields by the given criteria.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="searchAction"
- */
-public class SearchAction
- extends ActionSupport
-{
- /**
- * Query string.
- */
- private String q;
-
- /**
- * The MD5 to search by.
- */
- private String md5;
-
- /**
- * Search results.
- */
- private List searchResults;
-
- /**
- * @plexus.requirement
- */
- private RepositoryArtifactIndexFactory factory;
-
- /**
- * @plexus.requirement
- */
- private ConfiguredRepositoryFactory repositoryFactory;
-
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- private static final String NO_RESULTS = "noResults";
-
- private static final String RESULTS = "results";
-
- private static final String ARTIFACT = "artifact";
-
- public String quickSearch()
- throws MalformedURLException, RepositoryIndexException, RepositoryIndexSearchException,
- ConfigurationStoreException, ParseException
- {
- // TODO: give action message if indexing is in progress
-
- assert q != null && q.length() != 0;
-
- RepositoryArtifactIndex index = getIndex();
-
- if ( !index.exists() )
- {
- addActionError( "The repository is not yet indexed. Please wait, and then try again." );
- return ERROR;
- }
-
- // TODO! this is correct, but ugly
- MultiFieldQueryParser parser = new MultiFieldQueryParser( new String[]{StandardIndexRecordFields.GROUPID,
- StandardIndexRecordFields.ARTIFACTID, StandardIndexRecordFields.BASE_VERSION,
- StandardIndexRecordFields.CLASSIFIER, StandardIndexRecordFields.CLASSES, StandardIndexRecordFields.FILES,
- StandardIndexRecordFields.TYPE, StandardIndexRecordFields.PROJECT_NAME,
- StandardIndexRecordFields.PROJECT_DESCRIPTION}, new StandardAnalyzer() );
- searchResults = index.search( new LuceneQuery( parser.parse( q ) ) );
-
- return SUCCESS;
- }
-
- public String findArtifact()
- throws Exception
- {
- // TODO: give action message if indexing is in progress
-
- assert md5 != null && md5.length() != 0;
-
- RepositoryArtifactIndex index = getIndex();
-
- if ( !index.exists() )
- {
- addActionError( "The repository is not yet indexed. Please wait, and then try again." );
- return ERROR;
- }
-
- searchResults = index.search(
- new LuceneQuery( new TermQuery( new Term( StandardIndexRecordFields.MD5, md5.toLowerCase() ) ) ) );
-
- if ( searchResults.isEmpty() )
- {
- return NO_RESULTS;
- }
- if ( searchResults.size() == 1 )
- {
- return ARTIFACT;
- }
- else
- {
- return RESULTS;
- }
- }
-
- private RepositoryArtifactIndex getIndex()
- throws ConfigurationStoreException, RepositoryIndexException
- {
- Configuration configuration = configurationStore.getConfigurationFromStore();
- File indexPath = new File( configuration.getIndexPath() );
-
- return factory.createStandardIndex( indexPath );
- }
-
- public String doInput()
- {
- return INPUT;
- }
-
- public String getQ()
- {
- return q;
- }
-
- public void setQ( String q )
- {
- this.q = q;
- }
-
- public String getMd5()
- {
- return md5;
- }
-
- public void setMd5( String md5 )
- {
- this.md5 = md5;
- }
-
- public List getSearchResults()
- {
- return searchResults;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.xwork.ActionSupport;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.model.Model;
-import org.apache.maven.project.MavenProject;
-import org.apache.maven.project.MavenProjectBuilder;
-import org.apache.maven.project.ProjectBuildingException;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.ConfiguredRepositoryFactory;
-import org.codehaus.plexus.util.StringUtils;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * Browse the repository.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="showArtifactAction"
- */
-public class ShowArtifactAction
- extends ActionSupport
-{
- /**
- * @plexus.requirement
- */
- private ArtifactFactory artifactFactory;
-
- /**
- * @plexus.requirement
- */
- private ConfiguredRepositoryFactory repositoryFactory;
-
- /**
- * @plexus.requirement
- */
- private MavenProjectBuilder projectBuilder;
-
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- private String groupId;
-
- private String artifactId;
-
- private String version;
-
- private Model model;
-
- public String execute()
- throws ConfigurationStoreException, IOException, XmlPullParserException, ProjectBuildingException
- {
- if ( StringUtils.isEmpty( groupId ) )
- {
- // TODO: i18n
- addActionError( "You must specify a group ID to browse" );
- return ERROR;
- }
-
- if ( StringUtils.isEmpty( artifactId ) )
- {
- // TODO: i18n
- addActionError( "You must specify a artifact ID to browse" );
- return ERROR;
- }
-
- if ( StringUtils.isEmpty( version ) )
- {
- // TODO: i18n
- addActionError( "You must specify a version to browse" );
- return ERROR;
- }
-
- Configuration configuration = configurationStore.getConfigurationFromStore();
- List repositories = repositoryFactory.createRepositories( configuration );
-
- Artifact artifact = artifactFactory.createProjectArtifact( groupId, artifactId, version );
- // TODO: maybe we can decouple the assembly parts of the project builder from the repository handling to get rid of the temp repo
- ArtifactRepository localRepository = repositoryFactory.createLocalRepository( configuration );
- MavenProject project = projectBuilder.buildFromRepository( artifact, repositories, localRepository );
-
- model = project.getModel();
-
- return SUCCESS;
- }
-
- public Model getModel()
- {
- return model;
- }
-
- public String getGroupId()
- {
- return groupId;
- }
-
- public void setGroupId( String groupId )
- {
- this.groupId = groupId;
- }
-
- public String getArtifactId()
- {
- return artifactId;
- }
-
- public void setArtifactId( String artifactId )
- {
- this.artifactId = artifactId;
- }
-
- public String getVersion()
- {
- return version;
- }
-
- public void setVersion( String version )
- {
- this.version = version;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.xwork.ActionSupport;
-import com.opensymphony.xwork.ModelDriven;
-import com.opensymphony.xwork.Preparable;
-import org.apache.maven.repository.configuration.AbstractRepositoryConfiguration;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationChangeException;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.InvalidConfigurationException;
-
-import java.io.IOException;
-
-/**
- * Base action for repository configuration actions.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractConfigureRepositoryAction
- extends ActionSupport
- implements ModelDriven, Preparable
-{
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- /**
- * The repository.
- */
- private AbstractRepositoryConfiguration repository;
-
- /**
- * The repository ID to lookup when editing a repository.
- */
- private String repoId;
-
- /**
- * The previously read configuration.
- */
- protected Configuration configuration;
-
- public String add()
- throws IOException, ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException
- {
- // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
-
- AbstractRepositoryConfiguration existingRepository = getRepository( repository.getId() );
- if ( existingRepository != null )
- {
- addFieldError( "id", "A repository with that id already exists" );
- return INPUT;
- }
-
- return saveConfiguration();
- }
-
- public String edit()
- throws IOException, ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException
- {
- // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
-
- AbstractRepositoryConfiguration existingRepository = getRepository( repository.getId() );
- removeRepository( existingRepository );
-
- return saveConfiguration();
- }
-
- protected abstract void removeRepository( AbstractRepositoryConfiguration existingRepository );
-
- protected abstract AbstractRepositoryConfiguration getRepository( String id );
-
- private String saveConfiguration()
- throws IOException, ConfigurationStoreException, InvalidConfigurationException, ConfigurationChangeException
- {
- addRepository();
-
- configurationStore.storeConfiguration( configuration );
-
- // TODO: do we need to check if indexing is needed?
-
- addActionMessage( "Successfully saved configuration" );
-
- return SUCCESS;
- }
-
- protected abstract void addRepository()
- throws IOException;
-
- public String input()
- {
- return INPUT;
- }
-
- public Object getModel()
- {
- return repository;
- }
-
- protected abstract AbstractRepositoryConfiguration createRepository();
-
- public void prepare()
- throws ConfigurationStoreException
- {
- configuration = configurationStore.getConfigurationFromStore();
-
- if ( repository == null )
- {
- repository = getRepository( repoId );
- }
- if ( repository == null )
- {
- repository = createRepository();
- }
- }
-
- public String getRepoId()
- {
- return repoId;
- }
-
- public void setRepoId( String repoId )
- {
- this.repoId = repoId;
- }
-
- protected AbstractRepositoryConfiguration getRepository()
- {
- return repository;
- }
-
- public Configuration getConfiguration()
- {
- return configuration;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.AbstractRepositoryConfiguration;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationChangeException;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.InvalidConfigurationException;
-import org.apache.maven.repository.configuration.RepositoryConfiguration;
-import org.codehaus.plexus.xwork.action.PlexusActionSupport;
-
-import java.io.IOException;
-
-/**
- * Base action for repository removal actions.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public abstract class AbstractDeleteRepositoryAction
- extends PlexusActionSupport
-{
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- /**
- * The repository ID to lookup when editing a repository.
- */
- protected String repoId;
-
- /**
- * Which operation to select.
- */
- private String operation = "unmodified";
-
- public String execute()
- throws ConfigurationStoreException, IOException, InvalidConfigurationException, ConfigurationChangeException
- {
- // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
-
- if ( "delete-entry".equals( operation ) || "delete-contents".equals( operation ) )
- {
- Configuration configuration = configurationStore.getConfigurationFromStore();
-
- AbstractRepositoryConfiguration existingRepository = getRepository( configuration );
- if ( existingRepository == null )
- {
- addActionError( "A repository with that id does not exist" );
- return ERROR;
- }
-
- // TODO: remove from index too!
-
- removeRepository( configuration, existingRepository );
-
- configurationStore.storeConfiguration( configuration );
-
- if ( "delete-contents".equals( operation ) )
- {
- removeContents( existingRepository );
- }
- }
-
- return SUCCESS;
- }
-
- protected abstract void removeContents( AbstractRepositoryConfiguration existingRepository )
- throws IOException;
-
- protected abstract AbstractRepositoryConfiguration getRepository( Configuration configuration );
-
- protected abstract void removeRepository( Configuration configuration,
- AbstractRepositoryConfiguration existingRepository );
-
- public String input()
- {
- return INPUT;
- }
-
- public String getRepoId()
- {
- return repoId;
- }
-
- public void setRepoId( String repoId )
- {
- this.repoId = repoId;
- }
-
- public String getOperation()
- {
- return operation;
- }
-
- public void setOperation( String operation )
- {
- this.operation = operation;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.xwork.ActionSupport;
-import com.opensymphony.xwork.ModelDriven;
-import com.opensymphony.xwork.Preparable;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationChangeException;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.apache.maven.repository.configuration.ConfigurationStoreException;
-import org.apache.maven.repository.configuration.InvalidConfigurationException;
-import org.apache.maven.repository.indexing.RepositoryIndexException;
-import org.apache.maven.repository.indexing.RepositoryIndexSearchException;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Configures the application.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="configureAction"
- */
-public class ConfigureAction
- extends ActionSupport
- implements ModelDriven, Preparable
-{
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- /**
- * The configuration.
- */
- private Configuration configuration;
-
- public String execute()
- throws IOException, RepositoryIndexException, RepositoryIndexSearchException, ConfigurationStoreException,
- InvalidConfigurationException, ConfigurationChangeException
- {
- // TODO: if this didn't come from the form, go to configure.action instead of going through with re-saving what was just loaded
- // TODO: if this is changed, do we move the index or recreate it?
-
- // Normalize the path
- File file = new File( configuration.getIndexPath() );
- configuration.setIndexPath( file.getCanonicalPath() );
- if ( !file.exists() )
- {
- file.mkdirs();
- // TODO: error handling when this fails, or is not a directory
- }
-
- // Just double checking that our validation routines line up with what is expected in the configuration
- assert configuration.isValid();
-
- configurationStore.storeConfiguration( configuration );
-
- // TODO: if the repository has changed, we need to check if indexing is needed
-
- addActionMessage( "Successfully saved configuration" );
-
- return SUCCESS;
- }
-
- public String input()
- {
- return INPUT;
- }
-
- public Object getModel()
- {
- return configuration;
- }
-
- public void prepare()
- throws ConfigurationStoreException
- {
- configuration = configurationStore.getConfigurationFromStore();
- }
-}
\ No newline at end of file
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.AbstractRepositoryConfiguration;
-import org.apache.maven.repository.configuration.ProxiedRepositoryConfiguration;
-
-import java.io.IOException;
-
-/**
- * Configures the application repositories.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="configureProxiedRepositoryAction"
- */
-public class ConfigureProxiedRepositoryAction
- extends AbstractConfigureRepositoryAction
-{
- protected void removeRepository( AbstractRepositoryConfiguration existingRepository )
- {
- configuration.removeProxiedRepository( (ProxiedRepositoryConfiguration) existingRepository );
- }
-
- protected AbstractRepositoryConfiguration getRepository( String id )
- {
- return configuration.getProxiedRepositoryById( id );
- }
-
- protected void addRepository()
- throws IOException
- {
- ProxiedRepositoryConfiguration repository = (ProxiedRepositoryConfiguration) getRepository();
-
- configuration.addProxiedRepository( repository );
- }
-
- protected AbstractRepositoryConfiguration createRepository()
- {
- return new ProxiedRepositoryConfiguration();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.RepositoryConfiguration;
-import org.apache.maven.repository.configuration.AbstractRepositoryConfiguration;
-
-import java.io.File;
-import java.io.IOException;
-
-/**
- * Configures the application repositories.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="configureRepositoryAction"
- */
-public class ConfigureRepositoryAction
- extends AbstractConfigureRepositoryAction
-{
- protected void removeRepository( AbstractRepositoryConfiguration existingRepository )
- {
- configuration.removeRepository( (RepositoryConfiguration) existingRepository );
- }
-
- protected AbstractRepositoryConfiguration getRepository( String id )
- {
- return configuration.getRepositoryById( id );
- }
-
- protected void addRepository()
- throws IOException
- {
- RepositoryConfiguration repository = (RepositoryConfiguration) getRepository();
-
- // Normalize the path
- File file = new File( repository.getDirectory() );
- repository.setDirectory( file.getCanonicalPath() );
- if ( !file.exists() )
- {
- file.mkdirs();
- // TODO: error handling when this fails, or is not a directory
- }
-
- configuration.addRepository( repository );
- }
-
- protected AbstractRepositoryConfiguration createRepository()
- {
- RepositoryConfiguration repository = new RepositoryConfiguration();
- repository.setIndexed( false );
- return repository;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.AbstractRepositoryConfiguration;
-import org.apache.maven.repository.configuration.SyncedRepositoryConfiguration;
-
-import java.io.IOException;
-
-/**
- * Configures the application repositories.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="configureSyncedRepositoryAction"
- */
-public class ConfigureSyncedRepositoryAction
- extends AbstractConfigureRepositoryAction
-{
- protected void removeRepository( AbstractRepositoryConfiguration existingRepository )
- {
- configuration.removeSyncedRepository( (SyncedRepositoryConfiguration) existingRepository );
- }
-
- protected AbstractRepositoryConfiguration getRepository( String id )
- {
- return configuration.getSyncedRepositoryById( id );
- }
-
- protected void addRepository()
- throws IOException
- {
- SyncedRepositoryConfiguration repository = (SyncedRepositoryConfiguration) getRepository();
-
- configuration.addSyncedRepository( repository );
- }
-
- protected AbstractRepositoryConfiguration createRepository()
- {
- return new SyncedRepositoryConfiguration();
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.AbstractRepositoryConfiguration;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ProxiedRepositoryConfiguration;
-import org.apache.maven.repository.configuration.RepositoryConfiguration;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.IOException;
-
-/**
- * Configures the application repositories.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="deleteProxiedRepositoryAction"
- */
-public class DeleteProxiedRepositoryAction
- extends AbstractDeleteRepositoryAction
-{
- protected AbstractRepositoryConfiguration getRepository( Configuration configuration )
- {
- return configuration.getProxiedRepositoryById( repoId );
- }
-
- protected void removeRepository( Configuration configuration, AbstractRepositoryConfiguration existingRepository )
- {
- configuration.removeProxiedRepository( (ProxiedRepositoryConfiguration) existingRepository );
- }
-
- protected void removeContents( AbstractRepositoryConfiguration existingRepository )
- throws IOException
- {
- // TODO!
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.AbstractRepositoryConfiguration;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.RepositoryConfiguration;
-import org.codehaus.plexus.util.FileUtils;
-
-import java.io.IOException;
-
-/**
- * Configures the application repositories.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="deleteRepositoryAction"
- */
-public class DeleteRepositoryAction
- extends AbstractDeleteRepositoryAction
-{
- protected AbstractRepositoryConfiguration getRepository( Configuration configuration )
- {
- return configuration.getRepositoryById( repoId );
- }
-
- protected void removeRepository( Configuration configuration, AbstractRepositoryConfiguration existingRepository )
- {
- configuration.removeRepository( (RepositoryConfiguration) existingRepository );
- }
-
- protected void removeContents( AbstractRepositoryConfiguration existingRepository )
- throws IOException
- {
- RepositoryConfiguration repository = (RepositoryConfiguration) existingRepository;
- getLogger().info( "Removing " + repository.getDirectory() );
- FileUtils.deleteDirectory( repository.getDirectory() );
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.maven.repository.configuration.AbstractRepositoryConfiguration;
-import org.apache.maven.repository.configuration.SyncedRepositoryConfiguration;
-import org.apache.maven.repository.configuration.Configuration;
-
-import java.io.IOException;
-
-/**
- * Configures the application repositories.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="deleteSyncedRepositoryAction"
- */
-public class DeleteSyncedRepositoryAction
- extends AbstractDeleteRepositoryAction
-{
- protected AbstractRepositoryConfiguration getRepository( Configuration configuration )
- {
- return configuration.getSyncedRepositoryById( repoId );
- }
-
- protected void removeRepository( Configuration configuration, AbstractRepositoryConfiguration existingRepository )
- {
- configuration.removeSyncedRepository( (SyncedRepositoryConfiguration) existingRepository );
- }
-
- protected void removeContents( AbstractRepositoryConfiguration existingRepository )
- throws IOException
- {
- // TODO!
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.action.admin;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.xwork.ActionSupport;
-import org.apache.maven.repository.scheduler.RepositoryTaskScheduler;
-import org.apache.maven.repository.scheduler.TaskExecutionException;
-
-/**
- * Configures the application.
- *
- * @plexus.component role="com.opensymphony.xwork.Action" role-hint="runIndexerAction"
- */
-public class RunIndexerAction
- extends ActionSupport
-{
- /**
- * @plexus.requirement
- */
- private RepositoryTaskScheduler taskScheduler;
-
- public String execute()
- throws TaskExecutionException
- {
- taskScheduler.runIndexer();
-
- return SUCCESS;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.interceptor;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.xwork.ActionInvocation;
-import com.opensymphony.xwork.interceptor.Interceptor;
-import org.apache.maven.repository.configuration.Configuration;
-import org.apache.maven.repository.configuration.ConfigurationStore;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-/**
- * An interceptor that makes the application configuration available
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @todo might be a generally useful thing in plexus-xwork-integration
- * @plexus.component role="com.opensymphony.xwork.interceptor.Interceptor" role-hint="configurationInterceptor"
- */
-public class ConfigurationInterceptor
- extends AbstractLogEnabled
- implements Interceptor
-{
- /**
- * @plexus.requirement
- */
- private ConfigurationStore configurationStore;
-
- public String intercept( ActionInvocation actionInvocation )
- throws Exception
- {
- Configuration configuration = configurationStore.getConfigurationFromStore();
-
- if ( !configuration.isValid() )
- {
- if ( configuration.getRepositories().isEmpty() )
- {
- getLogger().info( "No repositories were configured - forwarding to repository configuration page" );
- return "config-repository-needed";
- }
- else
- {
- getLogger().info( "Configuration is incomplete - forwarding to configuration page" );
- return "config-needed";
- }
- }
- else
- {
- return actionInvocation.invoke();
- }
- }
-
- public void destroy()
- {
- // This space left intentionally blank
- }
-
- public void init()
- {
- // This space left intentionally blank
- }
-}
+++ /dev/null
-package org.apache.maven.repository.manager.web.mapper;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.opensymphony.webwork.dispatcher.mapper.ActionMapping;
-import com.opensymphony.webwork.dispatcher.mapper.DefaultActionMapper;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Map alternate URLs to specific actions. Used for the repository browser and the proxy.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class RepositoryActionMapper
- extends DefaultActionMapper
-{
- private static final String BROWSE_PREFIX = "/browse/";
-
- private static final String PROXY_PREFIX = "/proxy/";
-
- public String getUriFromActionMapping( ActionMapping actionMapping )
- {
- Map params = actionMapping.getParams();
- if ( "browseGroup".equals( actionMapping.getName() ) )
- {
- return BROWSE_PREFIX + params.remove( "groupId" );
- }
- else if ( "browseArtifact".equals( actionMapping.getName() ) )
- {
- return BROWSE_PREFIX + params.remove( "groupId" ) + "/" + params.remove( "artifactId" );
- }
- else if ( "showArtifact".equals( actionMapping.getName() ) )
- {
- return BROWSE_PREFIX + params.remove( "groupId" ) + "/" + params.remove( "artifactId" ) + "/" +
- params.remove( "version" );
- }
- else if ( "proxy".equals( actionMapping.getName() ) )
- {
- return PROXY_PREFIX + params.remove( "path" );
- }
-
- return super.getUriFromActionMapping( actionMapping );
- }
-
- public ActionMapping getMapping( HttpServletRequest httpServletRequest )
- {
- String path = httpServletRequest.getServletPath();
- if ( path.startsWith( BROWSE_PREFIX ) )
- {
- path = path.substring( BROWSE_PREFIX.length() );
- if ( path.length() == 0 )
- {
- return new ActionMapping( "browse", "/", "", null );
- }
- else
- {
- String[] parts = path.split( "/" );
- if ( parts.length == 1 )
- {
- Map params = new HashMap();
- params.put( "groupId", parts[0] );
- return new ActionMapping( "browseGroup", "/", "", params );
- }
- else if ( parts.length == 2 )
- {
- Map params = new HashMap();
- params.put( "groupId", parts[0] );
- params.put( "artifactId", parts[1] );
- return new ActionMapping( "browseArtifact", "/", "", params );
- }
- else if ( parts.length == 3 )
- {
- Map params = new HashMap();
- params.put( "groupId", parts[0] );
- params.put( "artifactId", parts[1] );
- params.put( "version", parts[2] );
- return new ActionMapping( "showArtifact", "/", "", params );
- }
- }
- }
- else if ( path.startsWith( PROXY_PREFIX ) )
- {
- // retain the leading /
- path = path.substring( PROXY_PREFIX.length() - 1 );
-
- Map params = new HashMap();
- params.put( "path", path );
- return new ActionMapping( "proxy", "/", "", params );
- }
-
- return super.getMapping( httpServletRequest );
- }
-}
<load-on-start>
<component>
- <role>org.apache.maven.repository.scheduler.RepositoryTaskScheduler</role>
+ <role>org.apache.maven.archiva.scheduler.RepositoryTaskScheduler</role>
</component>
</load-on-start>
</plexus>
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
+ "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
+
+<validators>
+ <field name="md5">
+ <field-validator type="requiredstring">
+ <message>
+ You must select a file, or enter the checksum. If the file was given and you receive this message,
+ there may have been an error generating the checksum.
+ </message>
+ </field-validator>
+ </field>
+</validators>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
+ "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
+
+<validators>
+ <field name="q">
+ <field-validator type="requiredstring">
+ <message>You must enter some search terms.</message>
+ </field-validator>
+ </field>
+</validators>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
+ "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
+
+<validators>
+ <field name="indexPath">
+ <field-validator type="requiredstring">
+ <message>You must enter the index directory.</message>
+ </field-validator>
+ </field>
+</validators>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
+ "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
+
+<validators>
+ <!-- TODO: constrain more -->
+ <field name="id">
+ <field-validator type="requiredstring">
+ <message>You must enter the repository identifier.</message>
+ </field-validator>
+ </field>
+ <field name="name">
+ <field-validator type="requiredstring">
+ <message>You must enter the repository name.</message>
+ </field-validator>
+ </field>
+ <field name="url">
+ <field-validator type="requiredstring">
+ <message>You must enter the repository URL.</message>
+ </field-validator>
+ </field>
+ <!-- TODO: validate managed repository -->
+ <!-- TODO: validate layout -->
+ <!-- TODO: validate policies -->
+ <!-- TODO: validate that intervals are integers -->
+ <!-- TODO: validate that intervals are empty if policy is not interval -->
+</validators>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
+ "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
+
+<validators>
+ <!-- TODO: constrain more -->
+ <field name="id">
+ <field-validator type="requiredstring">
+ <message>You must enter the repository identifier.</message>
+ </field-validator>
+ </field>
+ <field name="name">
+ <field-validator type="requiredstring">
+ <message>You must enter the repository name.</message>
+ </field-validator>
+ </field>
+ <field name="directory">
+ <field-validator type="requiredstring">
+ <message>You must enter the repository directory.</message>
+ </field-validator>
+ </field>
+ <!-- TODO: validate layout -->
+</validators>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
+ "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
+
+<validators>
+ <field name="id">
+ <field-validator type="requiredstring">
+ <message>You must enter the repository identifier.</message>
+ </field-validator>
+ </field>
+ <field name="name">
+ <field-validator type="requiredstring">
+ <message>You must enter the repository name.</message>
+ </field-validator>
+ </field>
+ <!-- TODO: validate managed repository -->
+ <!-- TODO: validate layout -->
+ <!-- TODO: validate sync settings, depending on what method -->
+</validators>
\ No newline at end of file
--- /dev/null
+<!--
+ ~ Copyright 2005-2006 The Apache Software Foundation.
+ ~
+ ~ Licensed under the Apache License, Version 2.0 (the "License");
+ ~ you may not use this file except in compliance with the License.
+ ~ You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
+ "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
+
+<validators>
+ <!-- TODO: constrain more -->
+ <field name="method">
+ <field-validator type="requiredstring">
+ <message>You must enter the synchronization method.</message>
+ </field-validator>
+ </field>
+</validators>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
- "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
-
-<validators>
- <field name="md5">
- <field-validator type="requiredstring">
- <message>
- You must select a file, or enter the checksum. If the file was given and you receive this message,
- there may have been an error generating the checksum.
- </message>
- </field-validator>
- </field>
-</validators>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
- "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
-
-<validators>
- <field name="q">
- <field-validator type="requiredstring">
- <message>You must enter some search terms.</message>
- </field-validator>
- </field>
-</validators>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
- "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
-
-<validators>
- <field name="indexPath">
- <field-validator type="requiredstring">
- <message>You must enter the index directory.</message>
- </field-validator>
- </field>
-</validators>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
- "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
-
-<validators>
- <!-- TODO: constrain more -->
- <field name="id">
- <field-validator type="requiredstring">
- <message>You must enter the repository identifier.</message>
- </field-validator>
- </field>
- <field name="name">
- <field-validator type="requiredstring">
- <message>You must enter the repository name.</message>
- </field-validator>
- </field>
- <field name="url">
- <field-validator type="requiredstring">
- <message>You must enter the repository URL.</message>
- </field-validator>
- </field>
- <!-- TODO: validate managed repository -->
- <!-- TODO: validate layout -->
- <!-- TODO: validate policies -->
- <!-- TODO: validate that intervals are integers -->
- <!-- TODO: validate that intervals are empty if policy is not interval -->
-</validators>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
- "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
-
-<validators>
- <!-- TODO: constrain more -->
- <field name="id">
- <field-validator type="requiredstring">
- <message>You must enter the repository identifier.</message>
- </field-validator>
- </field>
- <field name="name">
- <field-validator type="requiredstring">
- <message>You must enter the repository name.</message>
- </field-validator>
- </field>
- <field name="directory">
- <field-validator type="requiredstring">
- <message>You must enter the repository directory.</message>
- </field-validator>
- </field>
- <!-- TODO: validate layout -->
-</validators>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
- "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
-
-<validators>
- <field name="id">
- <field-validator type="requiredstring">
- <message>You must enter the repository identifier.</message>
- </field-validator>
- </field>
- <field name="name">
- <field-validator type="requiredstring">
- <message>You must enter the repository name.</message>
- </field-validator>
- </field>
- <!-- TODO: validate managed repository -->
- <!-- TODO: validate layout -->
- <!-- TODO: validate sync settings, depending on what method -->
-</validators>
\ No newline at end of file
+++ /dev/null
-<!--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
- "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
-
-<validators>
- <!-- TODO: constrain more -->
- <field name="method">
- <field-validator type="requiredstring">
- <message>You must enter the synchronization method.</message>
- </field-validator>
- </field>
-</validators>
\ No newline at end of file
# define our own action mapper here
-webwork.mapper.class=org.apache.maven.repository.manager.web.mapper.RepositoryActionMapper
+webwork.mapper.class=org.apache.maven.archiva.manager.web.mapper.RepositoryActionMapper
webwork.objectFactory = org.codehaus.plexus.xwork.PlexusObjectFactory
# TODO: package up a theme and share with Continuum. Should contain everything from xhtml, and set templateDir to WEB-INF/themes
\ No newline at end of file
<html>
<head>
<title>Find Artifact</title>
- <ww:head />
+ <ww:head/>
</head>
<body onload="document.checksumSearch.file.disabled = false">
<tr>
<td class="tdLabel"><label for="checksumSearch_file" class="label">Search for:</label></td>
<td>
- <input type="file" name="file" size="50" value="" id="checksumSearch_file" />
+ <input type="file" name="file" size="50" value="" id="checksumSearch_file"/>
</td>
</tr>
- <ww:textfield label="Checksum" size="50" name="md5" />
- <ww:submit value="Go!" />
+ <ww:textfield label="Checksum" size="50" name="md5"/>
+ <ww:submit value="Go!"/>
</ww:form>
<p>
<b>not</b>
be uploaded to the server. See the progress bar below for progress of
locally creating a checksum that is uploaded to the server after you hit "Go!".
- <ww:actionerror />
+ <ww:actionerror/>
</p>
<p>
- <applet code="org/apache/maven/repository/applet/ChecksumApplet.class"
- archive="maven-repository-artifact-applet.jar"
+ <applet code="org/apache/maven/archiva/applet/ChecksumApplet.class"
+ archive="archiva-applet.jar"
width="400" height="20" name="ChecksumApplet">
</applet>
</p>
+++ /dev/null
-<%--
- ~ Copyright 2005-2006 The Apache Software Foundation.
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- --%>
-
-<script type="text/javascript">
- function doSomething( file )
- {
- return document.ChecksumApplet.generateMd5(file);
- }
-</script>
-
-<p>Search by MD5 (select an artifact):</p>
-
-<form action="search.action"
- onsubmit="this.md5.value = doSomething(this.file.value); this.file.disabled = true;">
- <input name="md5" type="hidden" />
- <input type="file" name="file" />
- <input type="submit" value="Search" />
-</form>
-
-<p>
- <applet code="org/apache/maven/repository/applet/ChecksumApplet.class" archive="maven-repository-artifact-applet.jar"
- width="400" height="20" name="ChecksumApplet">
- </applet>
-</p>
-
-<p>Search:</p>
-
-<form action="searchg.action">
- <input name="searchString" type="text" />
- <input type="submit" value="Search" />
-</form>
-
-<p>Search by Java Package:</p>
-
-<form action="search.action">
- <input name="packageName" type="text" />
- <input type="submit" value="Search" />
-</form>
-
-
--- /dev/null
+package org.apache.maven.archiva.proxy.web.action.test.stub;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpSession;
+import java.security.Principal;
+import java.util.Enumeration;
+
+public class HttpServletRequestStub
+ extends ServletRequestStub
+ implements HttpServletRequest
+{
+
+ public String getAuthType()
+ {
+ return null;
+ }
+
+ public String getContextPath()
+ {
+ return "/location1/location2/location3";
+ }
+
+ public Cookie[] getCookies()
+ {
+ return null;
+ }
+
+ public long getDateHeader( String name )
+ {
+ return -1;
+ }
+
+ public String getHeader( String name )
+ {
+ return null;
+ }
+
+ public Enumeration getHeaderNames()
+ {
+ return null;
+ }
+
+ public Enumeration getHeaders( String name )
+ {
+ return null;
+ }
+
+ public int getIntHeader( String name )
+ {
+ return -1;
+ }
+
+ public String getMethod()
+ {
+ return null;
+ }
+
+ public String getPathInfo()
+ {
+ return null;
+ }
+
+ public String getPathTranslated()
+ {
+ return null;
+ }
+
+ public String getQueryString()
+ {
+ return null;
+ }
+
+ public String getRemoteUser()
+ {
+ return null;
+ }
+
+ public String getRequestedSessionId()
+ {
+ return null;
+ }
+
+ public String getRequestURI()
+ {
+ return "/projectname/repository/org/sometest/artifact-0.0.jar";
+ }
+
+ public StringBuffer getRequestURL()
+ {
+ return null;
+ }
+
+ public String getServletPath()
+ {
+ return "/repository/org/sometest/artifact-0.0.jar";
+ }
+
+ public HttpSession getSession()
+ {
+ return null;
+ }
+
+ public HttpSession getSession( boolean create )
+ {
+ return null;
+ }
+
+ public Principal getUserPrincipal()
+ {
+ return null;
+ }
+
+ public boolean isRequestedSessionIdFromCookie()
+ {
+ return false;
+ }
+
+ public boolean isRequestedSessionIdFromUrl()
+ {
+ return false;
+ }
+
+ public boolean isRequestedSessionIdFromURL()
+ {
+ return false;
+ }
+
+ public boolean isRequestedSessionIdValid()
+ {
+ return false;
+ }
+
+ public boolean isUserInRole( String role )
+ {
+ return false;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.proxy.web.action.test.stub;
+
+/*
+ * Copyright 2005-2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletInputStream;
+import javax.servlet.ServletRequest;
+import java.io.BufferedReader;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+public class ServletRequestStub
+ implements ServletRequest
+{
+
+ public Object getAttribute( String key )
+ {
+ return null;
+ }
+
+ public Enumeration getAttributeNames()
+ {
+ return null;
+ }
+
+ public String getCharacterEncoding()
+ {
+ return null;
+ }
+
+ public int getContentLength()
+ {
+ return -1;
+ }
+
+ public int getRemotePort()
+ {
+ return -1;
+ }
+
+ public int getLocalPort()
+ {
+ return -1;
+ }
+
+ public String getLocalAddr()
+ {
+ return null;
+ }
+
+ public String getLocalName()
+ {
+ return null;
+ }
+
+ public String getContentType()
+ {
+ return null;
+ }
+
+ public ServletInputStream getInputStream()
+ {
+ return null;
+ }
+
+ public Locale getLocale()
+ {
+ return null;
+ }
+
+ public Enumeration getLocales()
+ {
+ return null;
+ }
+
+ public String getParameter( String name )
+ {
+ return null;
+ }
+
+ public Map getParameterMap()
+ {
+ HashMap parameterMap = new HashMap();
+
+ parameterMap.put( "key1", "value1" );
+ parameterMap.put( "key2", "value2" );
+
+ return parameterMap;
+ }
+
+ public Enumeration getParameterNames()
+ {
+ return null;
+ }
+
+ public String[] getParameterValues( String name )
+ {
+ return null;
+ }
+
+ public String getProtocol()
+ {
+ return null;
+ }
+
+ public BufferedReader getReader()
+ {
+ return null;
+ }
+
+ public String getRealPath( String path )
+ {
+ return null;
+ }
+
+ public String getRemoteAddr()
+ {
+ return null;
+ }
+
+ public String getRemoteHost()
+ {
+ return null;
+ }
+
+ public RequestDispatcher getRequestDispatcher( String path )
+ {
+ return null;
+ }
+
+ public String getScheme()
+ {
+ return null;
+ }
+
+ public String getServerName()
+ {
+ return null;
+ }
+
+ public int getServerPort()
+ {
+ return -1;
+ }
+
+ public boolean isSecure()
+ {
+ return false;
+ }
+
+ public void removeAttribute( String name )
+ {
+
+ }
+
+ public void setAttribute( String name, Object value )
+ {
+
+ }
+
+ public void setCharacterEncoding( String env )
+ {
+
+ }
+}
+++ /dev/null
-package org.apache.maven.repository.proxy.web.action.test.stub;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import javax.servlet.http.Cookie;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
-import java.security.Principal;
-import java.util.Enumeration;
-
-public class HttpServletRequestStub
- extends ServletRequestStub
- implements HttpServletRequest
-{
-
- public String getAuthType()
- {
- return null;
- }
-
- public String getContextPath()
- {
- return "/location1/location2/location3";
- }
-
- public Cookie[] getCookies()
- {
- return null;
- }
-
- public long getDateHeader( String name )
- {
- return -1;
- }
-
- public String getHeader( String name )
- {
- return null;
- }
-
- public Enumeration getHeaderNames()
- {
- return null;
- }
-
- public Enumeration getHeaders( String name )
- {
- return null;
- }
-
- public int getIntHeader( String name )
- {
- return -1;
- }
-
- public String getMethod()
- {
- return null;
- }
-
- public String getPathInfo()
- {
- return null;
- }
-
- public String getPathTranslated()
- {
- return null;
- }
-
- public String getQueryString()
- {
- return null;
- }
-
- public String getRemoteUser()
- {
- return null;
- }
-
- public String getRequestedSessionId()
- {
- return null;
- }
-
- public String getRequestURI()
- {
- return "/projectname/repository/org/sometest/artifact-0.0.jar";
- }
-
- public StringBuffer getRequestURL()
- {
- return null;
- }
-
- public String getServletPath()
- {
- return "/repository/org/sometest/artifact-0.0.jar";
- }
-
- public HttpSession getSession()
- {
- return null;
- }
-
- public HttpSession getSession( boolean create )
- {
- return null;
- }
-
- public Principal getUserPrincipal()
- {
- return null;
- }
-
- public boolean isRequestedSessionIdFromCookie()
- {
- return false;
- }
-
- public boolean isRequestedSessionIdFromUrl()
- {
- return false;
- }
-
- public boolean isRequestedSessionIdFromURL()
- {
- return false;
- }
-
- public boolean isRequestedSessionIdValid()
- {
- return false;
- }
-
- public boolean isUserInRole( String role )
- {
- return false;
- }
-}
+++ /dev/null
-package org.apache.maven.repository.proxy.web.action.test.stub;
-
-/*
- * Copyright 2005-2006 The Apache Software Foundation.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import javax.servlet.RequestDispatcher;
-import javax.servlet.ServletInputStream;
-import javax.servlet.ServletRequest;
-import java.io.BufferedReader;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map;
-
-public class ServletRequestStub
- implements ServletRequest
-{
-
- public Object getAttribute( String key )
- {
- return null;
- }
-
- public Enumeration getAttributeNames()
- {
- return null;
- }
-
- public String getCharacterEncoding()
- {
- return null;
- }
-
- public int getContentLength()
- {
- return -1;
- }
-
- public int getRemotePort()
- {
- return -1;
- }
-
- public int getLocalPort()
- {
- return -1;
- }
-
- public String getLocalAddr()
- {
- return null;
- }
-
- public String getLocalName()
- {
- return null;
- }
-
- public String getContentType()
- {
- return null;
- }
-
- public ServletInputStream getInputStream()
- {
- return null;
- }
-
- public Locale getLocale()
- {
- return null;
- }
-
- public Enumeration getLocales()
- {
- return null;
- }
-
- public String getParameter( String name )
- {
- return null;
- }
-
- public Map getParameterMap()
- {
- HashMap parameterMap = new HashMap();
-
- parameterMap.put( "key1", "value1" );
- parameterMap.put( "key2", "value2" );
-
- return parameterMap;
- }
-
- public Enumeration getParameterNames()
- {
- return null;
- }
-
- public String[] getParameterValues( String name )
- {
- return null;
- }
-
- public String getProtocol()
- {
- return null;
- }
-
- public BufferedReader getReader()
- {
- return null;
- }
-
- public String getRealPath( String path )
- {
- return null;
- }
-
- public String getRemoteAddr()
- {
- return null;
- }
-
- public String getRemoteHost()
- {
- return null;
- }
-
- public RequestDispatcher getRequestDispatcher( String path )
- {
- return null;
- }
-
- public String getScheme()
- {
- return null;
- }
-
- public String getServerName()
- {
- return null;
- }
-
- public int getServerPort()
- {
- return -1;
- }
-
- public boolean isSecure()
- {
- return false;
- }
-
- public void removeAttribute( String name )
- {
-
- }
-
- public void setAttribute( String name, Object value )
- {
-
- }
-
- public void setCharacterEncoding( String env )
- {
-
- }
-}
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.maven.repository</groupId>
+ <groupId>org.apache.maven.archiva</groupId>
<artifactId>maven-repository-manager-white-site</artifactId>
<version>1.0-SNAPSHOT</version>
<name>Maven Repository Manager White Site</name>