</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-discoverer</artifactId>
+ <artifactId>archiva-repository-layer</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.shared</groupId>
+ <artifactId>maven-artifact-converter</artifactId>
+ <version>2.0.5-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
+++ /dev/null
-package org.apache.maven.archiva.converter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-/**
- * ConversionEvent
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class ConversionEvent
-{
- public static final int STARTED = 0;
-
- public static final int PROCESSED = 1;
-
- public static final int WARNING = 2;
-
- public static final int ERROR = 3;
-
- public static final int FINISHED = 4;
-
- private int type;
-
- private String message;
-
- private Artifact artifact;
-
- private ArtifactRepository repository;
-
- private Exception exception;
-
- public ConversionEvent( ArtifactRepository repository, int type )
- {
- this.repository = repository;
- this.type = type;
- }
-
- public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact )
- {
- this( repository, type );
- this.artifact = artifact;
- }
-
- public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact, String message )
- {
- this( repository, type );
- this.artifact = artifact;
- this.message = message;
- }
-
- public ConversionEvent( ArtifactRepository repository, int type, Artifact artifact, Exception exception )
- {
- this( repository, type );
- this.artifact = artifact;
- this.exception = exception;
- }
-
- public Artifact getArtifact()
- {
- return artifact;
- }
-
- public Exception getException()
- {
- return exception;
- }
-
- public String getMessage()
- {
- return message;
- }
-
- public ArtifactRepository getRepository()
- {
- return repository;
- }
-
- /**
- * <p>
- * The type of event.
- * </p>
- *
- * <p>
- * Can be one of the following ...
- * </p>
- *
- * <ul>
- * <li>{@link #STARTED} - the whole repository conversion process has started.
- * only seen when using the whole repository conversion technique with the
- * {@link LegacyRepositoryConverter#convertLegacyRepository(java.io.File, java.io.File, java.util.List, boolean)}
- * method.</li>
- * <li>{@link #PROCESSED} - a specific artifact has been processed.</li>
- * <li>{@link #WARNING} - a warning has been detected for a specific artifact during the conversion process.</li>
- * <li>{@link #ERROR} - an error in the processing of an artifact has been detected.</li>
- * <li>{@link #FINISHED} - the whole repository conversion process has finished.
- * only seen when using the whole repository conversion technique with the
- * {@link LegacyRepositoryConverter#convertLegacyRepository(java.io.File, java.io.File, java.util.List, boolean)}
- * method.</li>
- * </ul>
- * @return
- */
- public int getType()
- {
- return type;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.converter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * ConversionListener
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public interface ConversionListener
-{
- public void conversionEvent( ConversionEvent event );
-}
+++ /dev/null
-package org.apache.maven.archiva.converter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.maven.archiva.converter.transaction.FileTransaction;
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Metadata;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Snapshot;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.Versioning;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Reader;
-import org.apache.maven.artifact.repository.metadata.io.xpp3.MetadataXpp3Writer;
-import org.apache.maven.model.DistributionManagement;
-import org.apache.maven.model.Model;
-import org.apache.maven.model.Relocation;
-import org.apache.maven.model.converter.ModelConverter;
-import org.apache.maven.model.converter.PomTranslationException;
-import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
-import org.apache.maven.model.v3_0_0.io.xpp3.MavenXpp3Reader;
-import org.codehaus.plexus.digest.Digester;
-import org.codehaus.plexus.digest.DigesterException;
-import org.codehaus.plexus.i18n.I18N;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.Properties;
-import java.util.regex.Matcher;
-
-/**
- * Implementation of repository conversion class.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.archiva.converter.RepositoryConverter" role-hint="default"
- */
-public class DefaultRepositoryConverter
- extends AbstractLogEnabled
- implements RepositoryConverter
-{
- /**
- * {@link List}<{@link Digester}>
- *
- * @plexus.requirement role="org.codehaus.plexus.digest.Digester"
- */
- private List digesters;
-
- /**
- * @plexus.requirement
- */
- private ArtifactFactory artifactFactory;
-
- /**
- * @plexus.requirement
- */
- private ModelConverter translator;
-
- /**
- * @plexus.requirement
- */
- private ArtifactHandlerManager artifactHandlerManager;
-
- /**
- * @plexus.configuration default-value="false"
- */
- private boolean force;
-
- /**
- * @plexus.configuration default-value="false"
- */
- private boolean dryrun;
-
- /**
- * @plexus.requirement
- */
- private I18N i18n;
-
- private List listeners = new ArrayList();
-
- public void convert( Artifact artifact, ArtifactRepository targetRepository )
- throws RepositoryConversionException
- {
- if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
- {
- throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) );
- }
-
- if ( validateMetadata( artifact ) )
- {
- FileTransaction transaction = new FileTransaction();
-
- if ( copyPom( artifact, targetRepository, transaction ) )
- {
- if ( copyArtifact( artifact, targetRepository, transaction ) )
- {
- Metadata metadata = createBaseMetadata( artifact );
- Versioning versioning = new Versioning();
- versioning.addVersion( artifact.getBaseVersion() );
- metadata.setVersioning( versioning );
- updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata, transaction );
-
- metadata = createBaseMetadata( artifact );
- metadata.setVersion( artifact.getBaseVersion() );
- versioning = new Versioning();
-
- Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( artifact.getVersion() );
- if ( matcher.matches() )
- {
- Snapshot snapshot = new Snapshot();
- snapshot.setBuildNumber( Integer.valueOf( matcher.group( 3 ) ).intValue() );
- snapshot.setTimestamp( matcher.group( 2 ) );
- versioning.setSnapshot( snapshot );
- }
-
- // TODO: merge latest/release/snapshot from source instead
- metadata.setVersioning( versioning );
- updateMetadata( new SnapshotArtifactRepositoryMetadata( artifact ), targetRepository, metadata,
- transaction );
-
- if ( !dryrun )
- {
- transaction.commit();
- }
- }
- }
- }
- }
-
- private static Metadata createBaseMetadata( Artifact artifact )
- {
- Metadata metadata = new Metadata();
- metadata.setArtifactId( artifact.getArtifactId() );
- metadata.setGroupId( artifact.getGroupId() );
- return metadata;
- }
-
- private void updateMetadata( RepositoryMetadata artifactMetadata, ArtifactRepository targetRepository,
- Metadata newMetadata, FileTransaction transaction )
- throws RepositoryConversionException
- {
- File file = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
-
- Metadata metadata;
- boolean changed;
-
- if ( file.exists() )
- {
- metadata = readMetadata( file );
- changed = metadata.merge( newMetadata );
- }
- else
- {
- changed = true;
- metadata = newMetadata;
- }
-
- if ( changed )
- {
- StringWriter writer = null;
- try
- {
- writer = new StringWriter();
-
- MetadataXpp3Writer mappingWriter = new MetadataXpp3Writer();
-
- mappingWriter.write( writer, metadata );
-
- transaction.createFile( writer.toString(), file, digesters );
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Error writing target metadata", e );
- }
- finally
- {
- IOUtils.closeQuietly( writer );
- }
- }
- }
-
- private Metadata readMetadata( File file )
- throws RepositoryConversionException
- {
- Metadata metadata;
- MetadataXpp3Reader reader = new MetadataXpp3Reader();
- FileReader fileReader = null;
- try
- {
- fileReader = new FileReader( file );
- metadata = reader.read( fileReader );
- }
- catch ( FileNotFoundException e )
- {
- throw new RepositoryConversionException( "Error reading target metadata", e );
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Error reading target metadata", e );
- }
- catch ( XmlPullParserException e )
- {
- throw new RepositoryConversionException( "Error reading target metadata", e );
- }
- finally
- {
- IOUtils.closeQuietly( fileReader );
- }
- return metadata;
- }
-
- private boolean validateMetadata( Artifact artifact )
- throws RepositoryConversionException
- {
- ArtifactRepository repository = artifact.getRepository();
-
- boolean result = true;
-
- RepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact );
- File file = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
- if ( file.exists() )
- {
- Metadata metadata = readMetadata( file );
- result = validateMetadata( metadata, repositoryMetadata, artifact );
- }
-
- repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- file = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
- if ( file.exists() )
- {
- Metadata metadata = readMetadata( file );
- result = result && validateMetadata( metadata, repositoryMetadata, artifact );
- }
-
- return result;
- }
-
- private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact )
- {
- String groupIdKey;
- String artifactIdKey = null;
- String snapshotKey = null;
- String versionKey = null;
- String versionsKey = null;
-
- if ( repositoryMetadata.storedInGroupDirectory() )
- {
- groupIdKey = "failure.incorrect.groupMetadata.groupId";
- }
- else if ( repositoryMetadata.storedInArtifactVersionDirectory() )
- {
- groupIdKey = "failure.incorrect.snapshotMetadata.groupId";
- artifactIdKey = "failure.incorrect.snapshotMetadata.artifactId";
- versionKey = "failure.incorrect.snapshotMetadata.version";
- snapshotKey = "failure.incorrect.snapshotMetadata.snapshot";
- }
- else
- {
- groupIdKey = "failure.incorrect.artifactMetadata.groupId";
- artifactIdKey = "failure.incorrect.artifactMetadata.artifactId";
- versionsKey = "failure.incorrect.artifactMetadata.versions";
- }
-
- boolean result = true;
-
- if ( metadata.getGroupId() == null || !metadata.getGroupId().equals( artifact.getGroupId() ) )
- {
- addFailure( artifact, groupIdKey );
- result = false;
- }
- if ( !repositoryMetadata.storedInGroupDirectory() )
- {
- if ( metadata.getGroupId() == null || !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
- {
- addFailure( artifact, artifactIdKey );
- result = false;
- }
- if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
- {
- // artifact metadata
-
- boolean foundVersion = false;
- if ( metadata.getVersioning() != null )
- {
- for ( Iterator i = metadata.getVersioning().getVersions().iterator(); i.hasNext() && !foundVersion; )
- {
- String version = (String) i.next();
- if ( version.equals( artifact.getBaseVersion() ) )
- {
- foundVersion = true;
- }
- }
- }
-
- if ( !foundVersion )
- {
- addFailure( artifact, versionsKey );
- result = false;
- }
- }
- else
- {
- // snapshot metadata
- if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
- {
- addFailure( artifact, versionKey );
- result = false;
- }
-
- if ( artifact.isSnapshot() )
- {
- Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( artifact.getVersion() );
- if ( matcher.matches() )
- {
- boolean correct = false;
- if ( metadata.getVersioning() != null && metadata.getVersioning().getSnapshot() != null )
- {
- Snapshot snapshot = metadata.getVersioning().getSnapshot();
- int build = Integer.valueOf( matcher.group( 3 ) ).intValue();
- String ts = matcher.group( 2 );
- if ( build == snapshot.getBuildNumber() && ts.equals( snapshot.getTimestamp() ) )
- {
- correct = true;
- }
- }
-
- if ( !correct )
- {
- addFailure( artifact, snapshotKey );
- result = false;
- }
- }
- }
- }
- }
- return result;
- }
-
- private void addFailure( Artifact artifact, String key )
- {
- addFailureWithReason( artifact, getI18NString( key ) );
- }
-
- private void addWarning( Artifact artifact, String message )
- {
- // TODO: should we be able to identify/fix these?
- // TODO: write archiva-artifact-repair module
- triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.WARNING, artifact,
- message ) );
- }
-
- private void addFailureWithReason( Artifact artifact, String reason )
- {
- // TODO: should we be able to identify/fix these?
- triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.ERROR, artifact, reason ) );
- }
-
- private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
- throws RepositoryConversionException
- {
- Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact
- .getVersion() );
- pom.setBaseVersion( artifact.getBaseVersion() );
- ArtifactRepository repository = artifact.getRepository();
- File file = new File( repository.getBasedir(), repository.pathOf( pom ) );
-
- boolean result = true;
- if ( file.exists() )
- {
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pom ) );
-
- String contents = null;
- boolean checksumsValid = false;
- try
- {
- if ( testChecksums( artifact, file ) )
- {
- checksumsValid = true;
- }
-
- // Even if the checksums for the POM are invalid we should still convert the POM
- contents = FileUtils.readFileToString( file, null );
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Unable to read source POM: " + e.getMessage(), e );
- }
-
- if ( checksumsValid && contents.indexOf( "modelVersion" ) >= 0 )
- {
- // v4 POM
- try
- {
- boolean matching = false;
- if ( !force && targetFile.exists() )
- {
- String targetContents = FileUtils.readFileToString( targetFile, null );
- matching = targetContents.equals( contents );
- }
- if ( force || !matching )
- {
- transaction.createFile( contents, targetFile, digesters );
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Unable to write target POM: " + e.getMessage(), e );
- }
- }
- else
- {
- // v3 POM
- StringReader stringReader = new StringReader( contents );
- StringWriter writer = null;
- try
- {
- MavenXpp3Reader v3Reader = new MavenXpp3Reader();
- org.apache.maven.model.v3_0_0.Model v3Model = v3Reader.read( stringReader );
-
- if ( doRelocation( artifact, v3Model, targetRepository, transaction ) )
- {
- Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact
- .getArtifactId(), artifact.getVersion() );
- targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( relocatedPom ) );
- }
-
- Model v4Model = translator.translate( v3Model );
-
- translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(), v3Model
- .getVersion(), v3Model.getPackage() );
-
- writer = new StringWriter();
- MavenXpp3Writer Xpp3Writer = new MavenXpp3Writer();
- Xpp3Writer.write( writer, v4Model );
-
- transaction.createFile( writer.toString(), targetFile, digesters );
-
- List warnings = translator.getWarnings();
-
- for ( Iterator i = warnings.iterator(); i.hasNext(); )
- {
- String message = (String) i.next();
- addWarning( artifact, message );
- }
- }
- catch ( XmlPullParserException e )
- {
- addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
- result = false;
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Unable to write converted POM", e );
- }
- catch ( PomTranslationException e )
- {
- addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
- result = false;
- }
- finally
- {
- IOUtils.closeQuietly( writer );
- }
- }
- }
- else
- {
- addWarning( artifact, getI18NString( "warning.missing.pom" ) );
- }
- return result;
- }
-
- private boolean doRelocation( Artifact artifact, org.apache.maven.model.v3_0_0.Model v3Model,
- ArtifactRepository repository, FileTransaction transaction )
- throws IOException
- {
- Properties properties = v3Model.getProperties();
- if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" )
- || properties.containsKey( "relocated.version" ) )
- {
- String newGroupId = properties.getProperty( "relocated.groupId", v3Model.getGroupId() );
- properties.remove( "relocated.groupId" );
-
- String newArtifactId = properties.getProperty( "relocated.artifactId", v3Model.getArtifactId() );
- properties.remove( "relocated.artifactId" );
-
- String newVersion = properties.getProperty( "relocated.version", v3Model.getVersion() );
- properties.remove( "relocated.version" );
-
- String message = properties.getProperty( "relocated.message", "" );
- properties.remove( "relocated.message" );
-
- if ( properties.isEmpty() )
- {
- v3Model.setProperties( null );
- }
-
- writeRelocationPom( v3Model.getGroupId(), v3Model.getArtifactId(), v3Model.getVersion(), newGroupId,
- newArtifactId, newVersion, message, repository, transaction );
-
- v3Model.setGroupId( newGroupId );
- v3Model.setArtifactId( newArtifactId );
- v3Model.setVersion( newVersion );
-
- artifact.setGroupId( newGroupId );
- artifact.setArtifactId( newArtifactId );
- artifact.setVersion( newVersion );
-
- return true;
- }
- else
- {
- return false;
- }
- }
-
- private void writeRelocationPom( String groupId, String artifactId, String version, String newGroupId,
- String newArtifactId, String newVersion, String message,
- ArtifactRepository repository, FileTransaction transaction )
- throws IOException
- {
- Model pom = new Model();
- pom.setGroupId( groupId );
- pom.setArtifactId( artifactId );
- pom.setVersion( version );
-
- DistributionManagement dMngt = new DistributionManagement();
-
- Relocation relocation = new Relocation();
- relocation.setGroupId( newGroupId );
- relocation.setArtifactId( newArtifactId );
- relocation.setVersion( newVersion );
- if ( message != null && message.length() > 0 )
- {
- relocation.setMessage( message );
- }
-
- dMngt.setRelocation( relocation );
-
- pom.setDistributionManagement( dMngt );
-
- Artifact artifact = artifactFactory.createBuildArtifact( groupId, artifactId, version, "pom" );
- File pomFile = new File( repository.getBasedir(), repository.pathOf( artifact ) );
-
- StringWriter strWriter = new StringWriter();
- MavenXpp3Writer pomWriter = new MavenXpp3Writer();
- pomWriter.write( strWriter, pom );
-
- transaction.createFile( strWriter.toString(), pomFile, digesters );
- }
-
- private String getI18NString( String key, String arg0 )
- {
- return i18n.format( getClass().getName(), Locale.getDefault(), key, arg0 );
- }
-
- private String getI18NString( String key )
- {
- return i18n.getString( getClass().getName(), Locale.getDefault(), key );
- }
-
- private boolean testChecksums( Artifact artifact, File file )
- throws IOException
- {
- boolean result = true;
- Iterator it = digesters.iterator();
- while ( it.hasNext() )
- {
- Digester digester = (Digester) it.next();
- result &= verifyChecksum( file, file.getName() + "." + getDigesterFileExtension( digester ), digester,
- artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
- }
- return result;
- }
-
- /**
- * File extension for checksums
- * TODO should be moved to plexus-digester ?
- */
- private String getDigesterFileExtension( Digester digester )
- {
- return digester.getAlgorithm().toLowerCase().replaceAll( "-", "" );
- }
-
- private boolean verifyChecksum( File file, String fileName, Digester digester, Artifact artifact, String key )
- throws IOException
- {
- boolean result = true;
-
- File checksumFile = new File( file.getParentFile(), fileName );
- if ( checksumFile.exists() )
- {
- String checksum = FileUtils.readFileToString( checksumFile, null );
- try
- {
- digester.verify( file, checksum );
- }
- catch ( DigesterException e )
- {
- addFailure( artifact, key );
- result = false;
- }
- }
- return result;
- }
-
- private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
- throws RepositoryConversionException
- {
- File sourceFile = artifact.getFile();
-
- if ( sourceFile.getAbsolutePath().indexOf( "/plugins/" ) > -1 )
- {
- artifact.setArtifactHandler( artifactHandlerManager.getArtifactHandler( "maven-plugin" ) );
- }
-
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
-
- boolean result = true;
- try
- {
- boolean matching = false;
- if ( !force && targetFile.exists() )
- {
- matching = FileUtils.contentEquals( sourceFile, targetFile );
- if ( !matching )
- {
- addFailure( artifact, "failure.target.already.exists" );
- result = false;
- }
- }
- if ( result )
- {
- if ( force || !matching )
- {
- if ( testChecksums( artifact, sourceFile ) )
- {
- transaction.copyFile( sourceFile, targetFile, digesters );
- }
- else
- {
- result = false;
- }
- }
- }
- }
- catch ( IOException e )
- {
- throw new RepositoryConversionException( "Error copying artifact", e );
- }
- return result;
- }
-
- public void convert( List artifacts, ArtifactRepository targetRepository )
- throws RepositoryConversionException
- {
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
-
- try
- {
- convert( artifact, targetRepository );
- }
- catch ( RepositoryConversionException e )
- {
- triggerConversionEvent( new ConversionEvent( targetRepository, ConversionEvent.ERROR, artifact, e ) );
- }
- }
- }
-
- /**
- * Add a listener to the conversion process.
- *
- * @param listener the listener to add.
- */
- public void addConversionListener( ConversionListener listener )
- {
- listeners.add( listener );
- }
-
- /**
- * Remove a listener from the conversion process.
- *
- * @param listener the listener to remove.
- */
- public void removeConversionListener( ConversionListener listener )
- {
- listeners.remove( listener );
- }
-
- private void triggerConversionEvent( ConversionEvent event )
- {
- Iterator it = listeners.iterator();
- while ( it.hasNext() )
- {
- ConversionListener listener = (ConversionListener) it.next();
-
- try
- {
- listener.conversionEvent( event );
- }
- catch ( Throwable t )
- {
- getLogger().warn( "ConversionEvent resulted in exception from listener: " + t.getMessage(), t );
- }
- }
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.converter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.artifact.Artifact;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-
-import java.util.List;
-
-/**
- * Copy a set of artifacts from one repository to the other, converting if necessary.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface RepositoryConverter
-{
- String ROLE = RepositoryConverter.class.getName();
-
- /**
- * Convert a single artifact, writing it into the target repository.
- *
- * @param artifact the artifact to convert
- * @param targetRepository the target repository
- */
- void convert( Artifact artifact, ArtifactRepository targetRepository )
- throws RepositoryConversionException;
-
- /**
- * Convert a set of artifacts, writing them into the target repository.
- *
- * @param artifacts the set of artifacts to convert
- * @param targetRepository the target repository
- */
- void convert( List artifacts, ArtifactRepository targetRepository )
- throws RepositoryConversionException;
-
- /**
- * Add a listener to the conversion process.
- *
- * @param listener the listener to add.
- */
- void addConversionListener( ConversionListener listener );
-
- /**
- * Remove a listener from the conversion process.
- *
- * @param listener the listener to remove.
- */
- void removeConversionListener( ConversionListener listener );
-}
*/
import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.converter.ConversionListener;
+import org.apache.maven.archiva.common.utils.PathUtil;
import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.discoverer.Discoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
+import org.apache.maven.archiva.model.ArchivaRepository;
+import org.apache.maven.archiva.repository.RepositoryException;
+import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import java.io.File;
-import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
/**
- * @author Jason van Zyl
- * @plexus.component
- * @todo turn this into a general conversion component and hide all this crap here.
- * @todo it should be possible to move this to the converter module without causing it to gain additional dependencies
+ * DefaultLegacyRepositoryConverter
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ * @plexus.component
*/
public class DefaultLegacyRepositoryConverter
implements LegacyRepositoryConverter
{
- /**
- * @plexus.requirement role-hint="legacy"
- */
- private ArtifactRepositoryLayout legacyLayout;
-
- /**
- * @plexus.requirement role-hint="default"
- */
- private ArtifactRepositoryLayout defaultLayout;
-
/**
* @plexus.requirement
*/
/**
* @plexus.requirement role-hint="default"
*/
- private Discoverer discoverer;
+ private ArtifactRepositoryLayout defaultLayout;
/**
- * @plexus.requirement role="org.apache.maven.archiva.common.consumers.Consumer" role-hint="legacy-converter"
+ * @plexus.requirement role="org.apache.maven.archiva.consumers.RepositoryContentConsumer"
+ * role-hint="artifact-legacy-to-default-converter"
*/
private LegacyConverterArtifactConsumer legacyConverterConsumer;
public void convertLegacyRepository( File legacyRepositoryDirectory, File repositoryDirectory,
- List fileExclusionPatterns, boolean includeSnapshots )
+ List fileExclusionPatterns )
throws RepositoryConversionException
{
- ArtifactRepository legacyRepository;
-
- ArtifactRepository repository;
-
try
{
- String legacyRepositoryDir = legacyRepositoryDirectory.toURI().toURL().toString();
- String repositoryDir = repositoryDirectory.toURI().toURL().toString();
+ String legacyRepositoryUrl = PathUtil.toUrl( legacyRepositoryDirectory );
+ String defaultRepositoryUrl = PathUtil.toUrl( repositoryDirectory );
- //workaround for spaces non converted by PathUtils in wagon
- //TODO: remove it when PathUtils will be fixed
- if ( legacyRepositoryDir.indexOf( "%20" ) >= 0 )
+ // workaround for spaces not converted by PathUtils in wagon
+ // TODO: remove this when PathUtils is fixed
+ if ( legacyRepositoryUrl.indexOf( "%20" ) >= 0 )
{
- legacyRepositoryDir = StringUtils.replace( legacyRepositoryDir, "%20", " " );
+ legacyRepositoryUrl = StringUtils.replace( legacyRepositoryUrl, "%20", " " );
}
- if ( repositoryDir.indexOf( "%20" ) >= 0 )
+ if ( defaultRepositoryUrl.indexOf( "%20" ) >= 0 )
{
- repositoryDir = StringUtils.replace( repositoryDir, "%20", " " );
+ defaultRepositoryUrl = StringUtils.replace( defaultRepositoryUrl, "%20", " " );
}
- legacyRepository = artifactRepositoryFactory.createArtifactRepository( "legacy", legacyRepositoryDir,
- legacyLayout, null, null );
+ ArchivaRepository legacyRepository = new ArchivaRepository( "legacy", "Legacy Repository",
+ legacyRepositoryUrl );
+ legacyRepository.getModel().setLayoutName( "legacy" );
- repository = artifactRepositoryFactory.createArtifactRepository( "default", repositoryDir, defaultLayout,
- null, null );
- }
- catch ( MalformedURLException e )
- {
- throw new RepositoryConversionException( "Error convering legacy repository.", e );
- }
+ ArtifactRepository repository = artifactRepositoryFactory.createArtifactRepository( "default",
+ defaultRepositoryUrl,
+ defaultLayout, null,
+ null );
+ legacyConverterConsumer.setExcludes( fileExclusionPatterns );
+ legacyConverterConsumer.setDestinationRepository( repository );
- try
- {
List consumers = new ArrayList();
- legacyConverterConsumer.setDestinationRepository( repository );
consumers.add( legacyConverterConsumer );
- discoverer.walkRepository( legacyRepository, consumers, includeSnapshots );
+ RepositoryScanner scanner = new RepositoryScanner();
+ scanner.scan( legacyRepository, consumers, true );
}
- catch ( DiscovererException e )
+ catch ( RepositoryException e )
{
- throw new RepositoryConversionException(
- "Unable to convert repository due to discoverer error:" + e.getMessage(), e );
+ throw new RepositoryConversionException( "Error convering legacy repository.", e );
}
}
-
- /**
- * Add a listener to the conversion process.
- *
- * @param listener the listener to add.
- */
- public void addConversionListener( ConversionListener listener )
- {
- legacyConverterConsumer.addConversionListener( listener );
- }
-
- /**
- * Remove a listener from the conversion process.
- *
- * @param listener the listener to remove.
- */
- public void removeConversionListener( ConversionListener listener )
- {
- legacyConverterConsumer.removeConversionListener( listener );
- }
}
* under the License.
*/
-import org.apache.maven.archiva.common.utils.BaseFile;
-import org.apache.maven.archiva.consumers.GenericArtifactConsumer;
-import org.apache.maven.archiva.converter.ConversionListener;
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.apache.maven.archiva.converter.RepositoryConverter;
+import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
+import org.apache.maven.archiva.consumers.ConsumerException;
+import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
+import org.apache.maven.archiva.model.ArchivaRepository;
+import org.apache.maven.archiva.model.ArtifactReference;
+import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
+import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.artifact.Artifact;
+import org.apache.maven.artifact.converter.ArtifactConversionException;
+import org.apache.maven.artifact.converter.ArtifactConverter;
+import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
+import java.util.ArrayList;
+import java.util.List;
+
/**
* LegacyConverterArtifactConsumer - convert artifacts as they are found
* into the destination repository.
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*
- * @plexus.component role="org.apache.maven.archiva.common.consumers.Consumers"
- * role-hint="legacy-converter"
+ * @plexus.component role="org.apache.maven.archiva.consumers.RepositoryContentConsumer"
+ * role-hint="artifact-legacy-to-default-converter"
* instantiation-strategy="per-lookup"
*/
public class LegacyConverterArtifactConsumer
- extends GenericArtifactConsumer
+ extends AbstractMonitoredConsumer
+ implements RepositoryContentConsumer
{
+ /**
+ * @plexus.requirement role-hint="legacy-to-default"
+ */
+ private ArtifactConverter artifactConverter;
+
/**
* @plexus.requirement
*/
- private RepositoryConverter repositoryConverter;
+ private ArtifactFactory artifactFactory;
+
+ /**
+ * @plexus.requirement role-hint="legacy"
+ */
+ private BidirectionalRepositoryLayout bidirectionalLayout;
private ArtifactRepository destinationRepository;
- public void processArtifact( Artifact artifact, BaseFile file )
+ private List includes;
+
+ private List excludes;
+
+ public LegacyConverterArtifactConsumer()
+ {
+ includes = new ArrayList();
+ includes.add( "**/*.jar" );
+ includes.add( "**/*.ear" );
+ includes.add( "**/*.war" );
+ }
+
+ public void beginScan( ArchivaRepository repository )
+ throws ConsumerException
+ {
+
+ }
+
+ public void completeScan()
+ {
+
+ }
+
+ public List getExcludes()
+ {
+ return excludes;
+ }
+
+ public List getIncludes()
+ {
+ return includes;
+ }
+
+ public void processFile( String path )
+ throws ConsumerException
{
try
{
- repositoryConverter.convert( artifact, destinationRepository );
+ ArtifactReference reference = bidirectionalLayout.toArtifactReference( path );
+ Artifact artifact = artifactFactory.createArtifact( reference.getGroupId(), reference.getArtifactId(),
+ reference.getVersion(), reference.getClassifier(),
+ reference.getType() );
+ artifactConverter.convert( artifact, destinationRepository );
}
- catch ( RepositoryConversionException e )
+ catch ( LayoutException e )
{
- getLogger().error(
- "Unable to convert artifact " + artifact + " to destination repository "
- + destinationRepository, e );
+ getLogger().warn( "Unable to convert artifact: " + path + " : " + e.getMessage(), e );
+ }
+ catch ( ArtifactConversionException e )
+ {
+ getLogger().warn( "Unable to convert artifact: " + path + " : " + e.getMessage(), e );
}
}
- public void processFileProblem( BaseFile path, String message )
+ public String getDescription()
{
- getLogger().error( "Artifact Build Failure on " + path + " : " + message );
- // TODO: report this to the ConversionListener?
+ return "Legacy Artifact to Default Artifact Converter";
}
- public ArtifactRepository getDestinationRepository()
+ public String getId()
{
- return destinationRepository;
+ return "artifact-legacy-to-default-converter";
}
- public void setDestinationRepository( ArtifactRepository destinationRepository )
+ public boolean isPermanent()
{
- this.destinationRepository = destinationRepository;
+ return false;
}
-
- public String getName()
+
+ public void setExcludes( List excludes )
{
- return "Legacy Artifact Converter Consumer";
+ this.excludes = excludes;
}
-
- /**
- * Add a listener to the conversion process.
- *
- * @param listener the listener to add.
- */
- public void addConversionListener( ConversionListener listener )
+
+ public void setIncludes( List includes )
{
- repositoryConverter.addConversionListener( listener );
+ this.includes = includes;
}
- /**
- * Remove a listener from the conversion process.
- *
- * @param listener the listener to remove.
- */
- public void removeConversionListener( ConversionListener listener )
+ public ArtifactRepository getDestinationRepository()
{
- repositoryConverter.removeConversionListener( listener );
- }
+ return destinationRepository;
+ }
+
+ public void setDestinationRepository( ArtifactRepository destinationRepository )
+ {
+ this.destinationRepository = destinationRepository;
+ }
}
* under the License.
*/
-import org.apache.maven.archiva.converter.ConversionListener;
import org.apache.maven.archiva.converter.RepositoryConversionException;
import java.io.File;
* @param legacyRepositoryDirectory the directory of the legacy repository.
* @param destinationRepositoryDirectory the directory of the modern repository.
* @param fileExclusionPatterns the list of patterns to exclude from the conversion.
- * @param includeSnapshots true to include snapshots in conversion or not.
* @throws RepositoryConversionException
*/
void convertLegacyRepository( File legacyRepositoryDirectory, File destinationRepositoryDirectory,
- List fileExclusionPatterns, boolean includeSnapshots )
+ List fileExclusionPatterns )
throws RepositoryConversionException;
-
- /**
- * Add a listener to the conversion process.
- *
- * @param listener the listener to add.
- */
- void addConversionListener( ConversionListener listener );
-
- /**
- * Remove a listener from the conversion process.
- *
- * @param listener the listener to remove.
- */
- void removeConversionListener( ConversionListener listener );
}
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.io.FileUtils;
-import org.codehaus.plexus.digest.Digester;
-import org.codehaus.plexus.digest.DigesterException;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-
-/**
- * Abstract class for the TransactionEvents
- *
- * @author Edwin Punzalan
- * @author <a href="mailto:carlos@apache.org">Carlos Sanchez</a>
- * @version $Id$
- */
-public abstract class AbstractTransactionEvent
- extends AbstractLogEnabled
- implements TransactionEvent
-{
- private Map backups = new HashMap();;
-
- private List createdDirs = new ArrayList();
-
- private List createdFiles = new ArrayList();
-
- /** {@link List}<{@link Digester}> */
- private List digesters;
-
- protected AbstractTransactionEvent()
- {
- this( new ArrayList( 0 ) );
- }
-
- protected AbstractTransactionEvent( List digesters )
- {
- this.digesters = digesters;
- }
-
- protected List getDigesters()
- {
- return digesters;
- }
-
- /**
- * Method that creates a directory as well as all the parent directories needed
- *
- * @param dir The File directory to be created
- * @throws IOException when an unrecoverable error occurred
- */
- protected void mkDirs( File dir )
- throws IOException
- {
- List createDirs = new ArrayList();
-
- File parent = dir;
- while ( !parent.exists() || !parent.isDirectory() )
- {
- createDirs.add( parent );
-
- parent = parent.getParentFile();
- }
-
- while ( !createDirs.isEmpty() )
- {
- File directory = (File) createDirs.remove( createDirs.size() - 1 );
-
- if ( directory.mkdir() )
- {
- createdDirs.add( directory );
- }
- else
- {
- throw new IOException( "Failed to create directory: " + directory.getAbsolutePath() );
- }
- }
- }
-
- protected void revertMkDirs()
- throws IOException
- {
- if ( createdDirs != null )
- {
- Collections.reverse( createdDirs );
-
- while ( !createdDirs.isEmpty() )
- {
- File dir = (File) createdDirs.remove( 0 );
-
- if ( dir.isDirectory() && dir.list().length == 0 )
- {
- FileUtils.deleteDirectory( dir );
- }
- else
- {
- //cannot rollback created directory if it still contains files
- break;
- }
- }
- }
- }
-
- protected void revertFilesCreated()
- throws IOException
- {
- Iterator it = createdFiles.iterator();
- while ( it.hasNext() )
- {
- File file = (File) it.next();
- file.delete();
- it.remove();
- }
- }
-
- protected void createBackup( File file )
- throws IOException
- {
- if ( file.exists() && file.isFile() )
- {
- File backup = File.createTempFile( "temp-", ".backup" );
-
- FileUtils.copyFile( file, backup );
-
- backup.deleteOnExit();
-
- backups.put( file, backup );
- }
- }
-
- protected void restoreBackups()
- throws IOException
- {
- Iterator it = backups.entrySet().iterator();
- while ( it.hasNext() )
- {
- Map.Entry entry = (Map.Entry) it.next();
- FileUtils.copyFile( (File) entry.getValue(), (File) entry.getKey() );
- }
- }
-
- protected void restoreBackup( File file )
- throws IOException
- {
- File backup = (File) backups.get( file );
- if ( backup != null )
- {
- FileUtils.copyFile( backup, file );
- }
- }
-
- /**
- * Create checksums of file using all digesters defined at construction time.
- *
- * @param file
- * @param force whether existing checksums should be overwritten or not
- * @throws IOException
- */
- protected void createChecksums( File file, boolean force )
- throws IOException
- {
- Iterator it = getDigesters().iterator();
- while ( it.hasNext() )
- {
- Digester digester = (Digester) it.next();
- File checksumFile = new File( file.getAbsolutePath() + "." + getDigesterFileExtension( digester ) );
- if ( checksumFile.exists() )
- {
- if ( !force )
- {
- continue;
- }
- createBackup( checksumFile );
- }
- else
- {
- createdFiles.add( checksumFile );
- }
- try
- {
- FileUtils.writeStringToFile( checksumFile, digester.calc( file ), null );
- }
- catch ( DigesterException e )
- {
- // the Digester API just wraps IOException and should be fixed
- throw (IOException) e.getCause();
- }
- }
- }
-
- /**
- * File extension for checksums
- * TODO should be moved to plexus-digester ?
- */
- protected String getDigesterFileExtension( Digester digester )
- {
- return digester.getAlgorithm().toLowerCase().replaceAll( "-", "" );
- }
-
-}
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.commons.io.FileUtils;
-import org.codehaus.plexus.digest.Digester;
-
-/**
- * Event to copy a file.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @author <a href="mailto:carlos@apache.org">Carlos Sanchez</a>
- * @version $Id$
- */
-public class CopyFileEvent
- extends AbstractTransactionEvent
-{
- private final File source;
-
- private final File destination;
-
- /**
- * Creates a copy file event with no digesters
- *
- * @deprecated use other constructors
- *
- * @param source
- * @param destination
- */
- public CopyFileEvent( File source, File destination )
- {
- this( source, destination, new ArrayList( 0 ) );
- }
-
- /**
- *
- * @param source
- * @param destination
- * @param digesters {@link List}<{@link Digester}> digesters to use for checksumming
- */
- public CopyFileEvent( File source, File destination, List digesters )
- {
- super( digesters );
- this.source = source;
- this.destination = destination;
- }
-
- public void commit()
- throws IOException
- {
- createBackup( destination );
-
- mkDirs( destination.getParentFile() );
-
- FileUtils.copyFile( source, destination );
-
- createChecksums( destination, true );
- copyChecksums();
-
- copyChecksum( "asc" );
- }
-
- /**
- * Copy checksums of source file with all digesters if exist
- *
- * @throws IOException
- */
- private void copyChecksums()
- throws IOException
- {
- Iterator it = getDigesters().iterator();
- while ( it.hasNext() )
- {
- Digester digester = (Digester) it.next();
- copyChecksum( getDigesterFileExtension( digester ) );
- }
- }
-
- /**
- * Copy checksum of source file with extension provided if exists
- *
- * @param extension
- * @return whether the checksum exists or not
- * @throws IOException
- */
- private boolean copyChecksum( String extension )
- throws IOException
- {
- File checksumSource = new File( source.getAbsolutePath() + "." + extension );
- if ( checksumSource.exists() )
- {
- File checksumDestination = new File( destination.getAbsolutePath() + "." + extension );
- FileUtils.copyFile( checksumSource, checksumDestination );
- return true;
- }
- return false;
- }
-
- public void rollback()
- throws IOException
- {
- destination.delete();
-
- revertFilesCreated();
-
- revertMkDirs();
-
- restoreBackups();
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.io.FileUtils;
-import org.codehaus.plexus.digest.Digester;
-
-/**
- * Event for creating a file from a string content.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @author <a href="mailto:carlos@apache.org">Carlos Sanchez</a>
- * @version $Id$
- */
-public class CreateFileEvent
- extends AbstractTransactionEvent
-{
- private final File destination;
-
- private final String content;
-
- /**
- * Creates a create file event with no digesters
- *
- * @deprecated use other constructors
- *
- * @param content
- * @param destination
- */
- public CreateFileEvent( String content, File destination )
- {
- this( content, destination, new ArrayList( 0 ) );
- }
-
- /**
- *
- * @param content
- * @param destination
- * @param digesters {@link List}<{@link Digester}> digesters to use for checksumming
- */
- public CreateFileEvent( String content, File destination, List digesters )
- {
- super( digesters );
- this.content = content;
- this.destination = destination;
- }
-
- public void commit()
- throws IOException
- {
- createBackup( destination );
-
- mkDirs( destination.getParentFile() );
-
- if ( !destination.exists() && !destination.createNewFile() )
- {
- throw new IOException( "Unable to create new file" );
- }
-
- FileUtils.writeStringToFile( destination, content, null );
-
- createChecksums( destination, true );
- }
-
- public void rollback()
- throws IOException
- {
- destination.delete();
-
- revertFilesCreated();
-
- revertMkDirs();
-
- restoreBackups();
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.converter.RepositoryConversionException;
-import org.codehaus.plexus.digest.Digester;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Implement commit/rollback semantics for a set of files.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public class FileTransaction
-{
- private List events = new ArrayList();
-
- public void commit()
- throws RepositoryConversionException
- {
- List toRollback = new ArrayList( events.size() );
-
- for ( Iterator i = events.iterator(); i.hasNext(); )
- {
- TransactionEvent event = (TransactionEvent) i.next();
-
- try
- {
- event.commit();
-
- toRollback.add( event );
- }
- catch ( IOException e )
- {
- try
- {
- rollback( toRollback );
-
- throw new RepositoryConversionException( "Unable to commit file transaction", e );
- }
- catch ( IOException ioe )
- {
- throw new RepositoryConversionException(
- "Unable to commit file transaction, and rollback failed with error: '" + ioe.getMessage() + "'",
- e );
- }
- }
- }
- }
-
- private void rollback( List toRollback )
- throws IOException
- {
- for ( Iterator i = toRollback.iterator(); i.hasNext(); )
- {
- TransactionEvent event = (TransactionEvent) i.next();
-
- event.rollback();
- }
- }
-
- /**
- * @deprecated use {@link #copyFile(File, File, List)}
- * @param source
- * @param destination
- */
- public void copyFile( File source, File destination )
- {
- copyFile( source, destination, Collections.EMPTY_LIST );
- }
-
- /**
- *
- * @param source
- * @param destination
- * @param digesters {@link List}<{@link Digester}> digesters to use for checksumming
- */
- public void copyFile( File source, File destination, List digesters )
- {
- events.add( new CopyFileEvent( source, destination, digesters ) );
- }
-
- /**
- * @deprecated use {@link #createFile(String, File, List)}
- * @param content
- * @param destination
- */
- public void createFile( String content, File destination )
- {
- createFile( content, destination, Collections.EMPTY_LIST );
- }
-
- /**
- *
- * @param content
- * @param destination
- * @param digesters {@link List}<{@link Digester}> digesters to use for checksumming
- */
- public void createFile( String content, File destination, List digesters )
- {
- events.add( new CreateFileEvent( content, destination, digesters ) );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.IOException;
-
-/**
- * Interface for individual events in a transaction.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- */
-public interface TransactionEvent
-{
- /**
- * Commit this event.
- *
- * @throws IOException if an error occurred committing the change
- */
- void commit()
- throws IOException;
-
- /**
- * Rollback the even already committed.
- *
- * @throws IOException if an error occurred reverting the change
- */
- void rollback()
- throws IOException;
-}
+++ /dev/null
-package org.apache.maven.archiva.converter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
-/**
- * AllTests
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class AllTests
-{
-
- public static Test suite()
- {
- TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.converter" );
- //$JUnit-BEGIN$
- suite.addTest( org.apache.maven.archiva.converter.transaction.AllTests.suite() );
- suite.addTestSuite( RepositoryConverterTest.class );
- //$JUnit-END$
- return suite;
- }
-
-}
+++ /dev/null
-package org.apache.maven.archiva.converter;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.artifact.Artifact;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * MockConversionListener
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class MockConversionListener
- implements ConversionListener
-{
- private Map warnings = new HashMap();
-
- private Map errors = new HashMap();
-
- private Map exceptions = new HashMap();
-
- private List processed = new ArrayList();
-
- private List repositories = new ArrayList();
-
- public void conversionEvent( ConversionEvent event )
- {
- switch ( event.getType() )
- {
- case ConversionEvent.STARTED:
- addUnique( repositories, event.getRepository() );
- break;
- case ConversionEvent.PROCESSED:
- addUnique( processed, event.getArtifact() );
- break;
- case ConversionEvent.WARNING:
- if ( event.getException() != null )
- {
- addObjectList( exceptions, toKey( event.getArtifact() ), event.getException() );
- }
-
- if ( event.getMessage() != null )
- {
- addObjectList( warnings, toKey( event.getArtifact() ), event.getMessage() );
- }
- break;
- case ConversionEvent.ERROR:
- if ( event.getException() != null )
- {
- addObjectList( exceptions, toKey( event.getArtifact() ), event.getException() );
- }
-
- if ( event.getMessage() != null )
- {
- addObjectList( errors, toKey( event.getArtifact() ), event.getMessage() );
- }
- break;
- case ConversionEvent.FINISHED:
- addUnique( repositories, event.getRepository() );
- break;
- }
- }
-
- public String toKey( Artifact artifact )
- {
- return StringUtils.defaultString( artifact.getGroupId() ) + ":"
- + StringUtils.defaultString( artifact.getArtifactId() ) + ":"
- + StringUtils.defaultString( artifact.getVersion() ) + ":" + StringUtils.defaultString( artifact.getType() )
- + ":" + StringUtils.defaultString( artifact.getClassifier() );
- }
-
- private void addObjectList( Map map, String key, Object value )
- {
- List objlist = (List) map.get( key );
- if ( objlist == null )
- {
- objlist = new ArrayList();
- }
-
- objlist.add( value );
-
- map.put( key, objlist );
- }
-
- private void addUnique( Collection collection, Object obj )
- {
- if ( !collection.contains( obj ) )
- {
- collection.add( obj );
- }
- }
-
- public Map getErrors()
- {
- return errors;
- }
-
- public Map getExceptions()
- {
- return exceptions;
- }
-
- public List getProcessed()
- {
- return processed;
- }
-
- public List getRepositories()
- {
- return repositories;
- }
-
- public Map getWarnings()
- {
- return warnings;
- }
-
- private int getObjectListCount( Map map )
- {
- int count = 0;
- for ( Iterator it = map.values().iterator(); it.hasNext(); )
- {
- List objList = (List) it.next();
- count += objList.size();
- }
- return count;
- }
-
- public int getWarningMessageCount()
- {
- return getObjectListCount( warnings );
- }
-
- public int getErrorMessageCount()
- {
- return getObjectListCount( errors );
- }
-}
*/
import org.apache.commons.io.FileUtils;
-import org.apache.maven.artifact.Artifact;
+import org.apache.maven.archiva.common.utils.PathUtil;
+import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;
+import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.artifact.factory.ArtifactFactory;
-import org.apache.maven.artifact.metadata.ArtifactMetadata;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
-import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
-import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.i18n.I18N;
import java.io.File;
import java.io.IOException;
-import java.text.SimpleDateFormat;
import java.util.ArrayList;
-import java.util.Iterator;
import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.regex.Matcher;
/**
* Test the repository converter.
{
private ArtifactRepository sourceRepository;
- private ArtifactRepository targetRepository;
+ private ArchivaRepository targetRepository;
- private RepositoryConverter repositoryConverter;
+ private LegacyRepositoryConverter repositoryConverter;
private ArtifactFactory artifactFactory;
File targetBase = getTestFile( "target/test-target-repository" );
copyDirectoryStructure( getTestFile( "src/test/target-repository" ), targetBase );
- targetRepository = factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null,
- null );
+ targetRepository = new ArchivaRepository( "target", "Target Repo", PathUtil.toUrl( targetBase ) );
+ targetRepository.getModel().setLayoutName( "default" );
- repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "default" );
+ repositoryConverter = (LegacyRepositoryConverter) lookup( LegacyRepositoryConverter.ROLE, "default" );
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
}
}
- public void testV4PomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that it is copied as is
-
- Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( versionMetadata ) );
- versionMetadataFile.delete();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- artifactFile.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkSuccess(listener);
-
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( sourcePomFile, pomFile );
-
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v4-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot metadata created", versionMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( "src/test/expected-files/v4-version-metadata.xml" );
-
- compareFiles( expectedMetadataFile, versionMetadataFile );
- }
-
- public void testV3PomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that the pom is converted
-
- Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( versionMetadata ) );
- versionMetadataFile.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkSuccess(listener);
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
-
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot metadata created", versionMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( "src/test/expected-files/v3-version-metadata.xml" );
-
- compareFiles( expectedMetadataFile, versionMetadataFile );
- }
-
- public void testV3PomConvertWithRelocation()
- throws RepositoryConversionException, IOException
- {
- Artifact artifact = createArtifact( "test", "relocated-v3artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( versionMetadata ) );
- versionMetadataFile.delete();
-
- repositoryConverter.convert( artifact, targetRepository );
- //checkSuccess(); --> commented until MNG-2100 is fixed
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check if relocated artifact created", artifactFile.exists() );
- assertTrue( "Check if relocated artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
- Artifact pomArtifact = createArtifact( "relocated-test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
- File pomFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( pomArtifact ) );
- File testFile = getTestFile( "target/test-target-repository/" + targetRepository.pathOf( pomArtifact ) );
- compareFiles( pomFile, testFile );
-
- Artifact orig = createArtifact( "test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
- artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( orig ) );
- assertTrue( "Check if relocation artifact pom is created", artifactFile.exists() );
- testFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( orig ) );
- compareFiles( artifactFile, testFile );
- }
-
- public void testV3PomWarningsOnConvert()
- throws RepositoryConversionException, IOException
- {
- // test that the pom is converted but that warnings are reported
-
- Artifact artifact = createArtifact( "test", "v3-warnings-artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( versionMetadata ) );
- versionMetadataFile.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 0, 2 );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-warnings.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
-
- // TODO: check 2 warnings (extend and versions) matched on i18n key
- }
-
- private void doTestV4SnapshotPomConvert( String version, String expectedMetadataFileName )
- throws RepositoryConversionException, IOException
- {
- // test that it is copied as is
-
- Artifact artifact = createArtifact( "test", "v4artifact", version );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
- snapshotMetadataFile.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 0, 0 );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( sourcePomFile, pomFile );
-
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v4-snapshot-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot metadata created", snapshotMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( expectedMetadataFileName );
-
- compareFiles( expectedMetadataFile, snapshotMetadataFile );
- }
-
- public void testV3SnapshotPomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that the pom is converted
-
- Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-SNAPSHOT" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
- snapshotMetadataFile.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 0, 0 );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-snapshot.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
-
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot metadata created", snapshotMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-metadata.xml" );
-
- compareFiles( expectedMetadataFile, snapshotMetadataFile );
- }
-
- public void testV4SnapshotPomConvert()
+ public void testLegacyConversion()
throws IOException, RepositoryConversionException
{
- doTestV4SnapshotPomConvert( "1.0.0-SNAPSHOT", "src/test/expected-files/v4-snapshot-metadata.xml" );
-
- assertTrue( true );
- }
-
- public void testV4TimestampedSnapshotPomConvert()
- throws IOException, RepositoryConversionException
- {
- doTestV4SnapshotPomConvert( "1.0.0-20060111.120115-1",
- "src/test/expected-files/v4-timestamped-snapshot-metadata.xml" );
-
- assertTrue( true );
- }
-
- public void testMavenOnePluginConversion()
- throws Exception
- {
- Artifact artifact = createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0",
- "maven-plugin" );
- artifact.setFile( new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
- repositoryConverter.convert( artifact, targetRepository );
- // There is a warning but I can't figure out how to look at it. Eyeballing the results it appears
- // the plugin is being converted correctly.
- //checkSuccess();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- /*
- The POM isn't needed for Maven 1.x plugins but the raw conversion for
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
- compareFiles( expectedPomFile, pomFile );
- */
- }
-
- public void testV3TimestampedSnapshotPomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that the pom is converted
-
- Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-20060105.130101-3" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- artifactMetadataFile.delete();
-
- ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
- snapshotMetadataFile.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 0, 0 );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-v3-timestamped-snapshot.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
-
- assertTrue( "Check artifact snapshotMetadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/v3-snapshot-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
-
- assertTrue( "Check snapshot snapshotMetadata created", snapshotMetadataFile.exists() );
-
- expectedMetadataFile = getTestFile( "src/test/expected-files/v3-timestamped-snapshot-metadata.xml" );
-
- compareFiles( expectedMetadataFile, snapshotMetadataFile );
- }
-
- public void testNoPomConvert()
- throws IOException, RepositoryConversionException
- {
- // test that a POM is not created when there was none at the source
-
- Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" );
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 0, 1 );
-
- assertHasWarningReason( listener, getI18nString( "warning.missing.pom" ) );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
-
- assertFalse( "Check no POM created", pomFile.exists() );
- assertFalse( "No source POM", sourcePomFile.exists() );
- }
-
- public void testIncorrectSourceChecksumMd5()
- throws RepositoryConversionException
- {
- // test that it fails when the source md5 is wrong
-
- Artifact artifact = createArtifact( "test", "incorrectMd5Artifact", "1.0.0" );
- File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- file.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 1, 0 );
-
- assertHasErrorReason( listener, getI18nString( "failure.incorrect.md5" ) );
-
- assertFalse( "Check artifact not created", file.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- public void testIncorrectSourceChecksumSha1()
- throws RepositoryConversionException
- {
- // test that it fails when the source sha1 is wrong
-
- Artifact artifact = createArtifact( "test", "incorrectSha1Artifact", "1.0.0" );
- File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- file.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 1, 0 );
-
- assertHasErrorReason( listener, getI18nString( "failure.incorrect.sha1" ) );
-
- assertFalse( "Check artifact not created", file.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- public void testUnmodifiedArtifact()
- throws RepositoryConversionException, IOException, InterruptedException
- {
- // test the unmodified artifact is untouched
-
- Artifact artifact = createArtifact( "test", "unmodified-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- assertTrue( "Check target file exists", targetFile.exists() );
- assertTrue( "Check target POM exists", targetPomFile.exists() );
-
- sourceFile.setLastModified( System.currentTimeMillis() );
- sourcePomFile.setLastModified( System.currentTimeMillis() );
-
- long origTime = targetFile.lastModified();
- long origPomTime = targetPomFile.lastModified();
-
- // Need to guarantee last modified is not equal
- Thread.sleep( SLEEP_MILLIS );
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 0, 0 );
-
- compareFiles( sourceFile, targetFile );
- compareFiles( sourcePomFile, targetPomFile );
-
- assertEquals( "Check artifact unmodified", origTime, targetFile.lastModified() );
- assertEquals( "Check POM unmodified", origPomTime, targetPomFile.lastModified() );
- }
-
- public void testModifiedArtifactFails()
- throws InterruptedException, RepositoryConversionException, IOException
- {
- // test that it fails when the source artifact has changed and is different to the existing artifact in the
- // target repository
-
- Artifact artifact = createArtifact( "test", "modified-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- assertTrue( "Check target file exists", targetFile.exists() );
- assertTrue( "Check target POM exists", targetPomFile.exists() );
-
- sourceFile.setLastModified( System.currentTimeMillis() );
- sourcePomFile.setLastModified( System.currentTimeMillis() );
-
- long origTime = targetFile.lastModified();
- long origPomTime = targetPomFile.lastModified();
-
- // Need to guarantee last modified is not equal
- Thread.sleep( SLEEP_MILLIS );
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 1, 0 );
-
- assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) );
-
- assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
- assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- public void testForcedUnmodifiedArtifact()
- throws Exception
- {
- // test unmodified artifact is still converted when set to force
-
- repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "force-repository-converter" );
-
- Artifact artifact = createArtifact( "test", "unmodified-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyy-MM-dd", Locale.getDefault() );
- long origTime = dateFormat.parse( "2006-03-03" ).getTime();
- targetFile.setLastModified( origTime );
- targetPomFile.setLastModified( origTime );
-
- sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() );
- sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() );
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 0, 0 );
-
- compareFiles( sourceFile, targetFile );
- compareFiles( sourcePomFile, targetPomFile );
-
- assertFalse( "Check modified", origTime == targetFile.lastModified() );
- assertFalse( "Check modified", origTime == targetPomFile.lastModified() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( metadata ) );
- assertTrue( "Check metadata created", metadataFile.exists() );
- }
-
- public void testDryRunSuccess()
- throws Exception
- {
- // test dry run does nothing on a run that will be successful, and returns success
-
- repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "dryrun-repository-converter" );
-
- Artifact artifact = createArtifact( "test", "dryrun-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 0, 0 );
-
- assertTrue( "Check source file exists", sourceFile.exists() );
- assertTrue( "Check source POM exists", sourcePomFile.exists() );
-
- assertFalse( "Check target file doesn't exist", targetFile.exists() );
- assertFalse( "Check target POM doesn't exist", targetPomFile.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- public void testDryRunFailure()
- throws Exception
- {
- // test dry run does nothing on a run that will fail, and returns failure
-
- repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "dryrun-repository-converter" );
-
- Artifact artifact = createArtifact( "test", "modified-artifact", "1.0.0" );
- Artifact pomArtifact = createPomArtifact( artifact );
-
- File sourceFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( pomArtifact ) );
- File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
-
- assertTrue( "Check target file exists", targetFile.exists() );
- assertTrue( "Check target POM exists", targetPomFile.exists() );
-
- sourceFile.setLastModified( System.currentTimeMillis() );
- sourcePomFile.setLastModified( System.currentTimeMillis() );
-
- long origTime = targetFile.lastModified();
- long origPomTime = targetPomFile.lastModified();
-
- // Need to guarantee last modified is not equal
- Thread.sleep( SLEEP_MILLIS );
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkFailure(listener);
-
- assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) );
-
- assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
- assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- public void testRollbackArtifactCreated()
- throws RepositoryConversionException, IOException
- {
- // test rollback can remove a created artifact, including checksums
-
- Artifact artifact = createArtifact( "test", "rollback-created-artifact", "1.0.0" );
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- FileUtils.deleteDirectory( artifactMetadataFile.getParentFile() );
-
- ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( versionMetadata ) );
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkCounts( listener, 1, 0 );
-
- List messages = (List) listener.getErrors().get( listener.toKey( artifact ));
- assertNotNull("Should have error messages.");
-
- boolean found = false;
- String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$";
- for ( Iterator it = messages.iterator(); it.hasNext(); )
- {
- String reason = (String) it.next();
- if( reason.matches( pattern ) )
- {
- found = true;
- break;
- }
- }
-
- assertTrue( "Check failure message.", found );
-
- assertFalse( "check artifact rolled back", artifactFile.exists() );
- assertFalse( "check metadata rolled back", artifactMetadataFile.exists() );
- assertFalse( "check metadata rolled back", versionMetadataFile.exists() );
- }
-
- public void testMultipleArtifacts()
- throws RepositoryConversionException, IOException
- {
- // test multiple artifacts are converted
-
- List artifacts = new ArrayList();
- artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) );
- artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) );
- artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) );
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifacts, targetRepository );
- checkCounts( listener, 0, 0 );
-
- for ( Iterator i = artifacts.iterator(); i.hasNext(); )
- {
- Artifact artifact = (Artifact) i.next();
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId()
- + ".pom" );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( expectedPomFile, pomFile );
- }
- }
-
- public void testInvalidSourceArtifactMetadata()
- throws Exception
- {
- // test artifact is not converted when source metadata is invalid, and returns failure
-
- createModernSourceRepository();
-
- Artifact artifact = createArtifact( "test", "incorrectArtifactMetadata", "1.0.0" );
- File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- file.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkFailure(listener);
-
- assertHasErrorReason( listener, getI18nString( "failure.incorrect.artifactMetadata.versions" ) );
-
- assertFalse( "Check artifact not created", file.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- public void testInvalidSourceSnapshotMetadata()
- throws Exception
- {
- // test artifact is not converted when source snapshot metadata is invalid and returns failure
-
- createModernSourceRepository();
-
- Artifact artifact = createArtifact( "test", "incorrectSnapshotMetadata", "1.0.0-20060102.030405-6" );
- File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- file.delete();
-
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkFailure(listener);
-
- assertHasErrorReason( listener, getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ) );
-
- assertFalse( "Check artifact not created", file.exists() );
-
- ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( metadata ) );
- assertFalse( "Check metadata not created", metadataFile.exists() );
- }
-
- public void testMergeArtifactMetadata()
- throws RepositoryConversionException, IOException
- {
- // test artifact level metadata is merged when it already exists on successful conversion
-
- Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" );
- MockConversionListener listener = new MockConversionListener();
-
- repositoryConverter.addConversionListener( listener );
- repositoryConverter.convert( artifact, targetRepository );
- checkSuccess(listener);
-
- File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- assertTrue( "Check artifact created", artifactFile.exists() );
- assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
- assertTrue( "Check POM created", pomFile.exists() );
-
- compareFiles( sourcePomFile, pomFile );
-
- ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
- .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
- assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
- File expectedMetadataFile = getTestFile( "src/test/expected-files/newversion-artifact-metadata.xml" );
-
- compareFiles( expectedMetadataFile, artifactMetadataFile );
- }
-
- public void testSourceAndTargetRepositoriesMatch()
- throws Exception
- {
- // test that it fails if the source and target repositories are the same
-
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(), targetRepository
- .getLayout(), null, null );
-
- Artifact artifact = createArtifact( "test", "repository-artifact", "1.0" );
-
- try
- {
- repositoryConverter.convert( artifact, targetRepository );
- fail( "Should have failed trying to convert within the same repository" );
- }
- catch ( RepositoryConversionException e )
- {
- // expected
- assertEquals( "check message", getI18nString( "exception.repositories.match" ), e.getMessage() );
- assertNull( "Check no additional cause", e.getCause() );
- }
- }
-
- private Artifact createArtifact( String groupId, String artifactId, String version )
- {
- Matcher matcher = Artifact.VERSION_FILE_PATTERN.matcher( version );
- String baseVersion;
- if ( matcher.matches() )
- {
- baseVersion = matcher.group( 1 ) + "-SNAPSHOT";
- }
- else
- {
- baseVersion = version;
- }
- return createArtifact( groupId, artifactId, baseVersion, version, "jar" );
- }
-
- private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version, String type )
- {
- Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
- artifact.setBaseVersion( baseVersion );
- artifact.setRepository( sourceRepository );
- artifact.setFile( new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) ) );
- return artifact;
- }
-
- private Artifact createPomArtifact( Artifact artifact )
- {
- return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(), artifact
- .getVersion(), "pom" );
- }
-
- private static void compareFiles( File expectedPomFile, File pomFile )
- throws IOException
- {
- String expectedContent = normalizeString( FileUtils.readFileToString( expectedPomFile, null ) );
- String targetContent = normalizeString( FileUtils.readFileToString( pomFile, null ) );
- assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent, targetContent );
- }
-
- private static String normalizeString( String path )
- {
- return path.trim().replaceAll( "\r\n", "\n" ).replace( '\r', '\n' ).replaceAll( "<\\?xml .+\\?>", "" );
- }
-
- private void checkSuccess(MockConversionListener listener)
- {
- checkCounts( listener, 0, 0 );
- }
-
- private void checkFailure(MockConversionListener listener)
- {
- checkCounts( listener, 1, 0 );
- }
-
- private void checkCounts( MockConversionListener listener, int failures, int warnings )
- {
- int actualFailures = listener.getErrorMessageCount();
- int actualWarnings = listener.getWarningMessageCount();
-
- if ( ( failures != actualFailures ) || ( warnings != actualWarnings ) )
- {
- fail( "Check Results Counts expected:<" + failures + "," + warnings + "> but was:<" + actualFailures + ","
- + actualWarnings + ">" );
- }
- }
-
- private String getI18nString( String key )
- {
- return i18n.getString( repositoryConverter.getClass().getName(), Locale.getDefault(), key );
- }
-
- private void assertHasWarningReason( MockConversionListener listener, String reason )
- {
- assertHasMessage( listener.getWarnings(), "warning", reason );
- }
-
- private void assertHasErrorReason( MockConversionListener listener, String reason )
- {
- assertHasMessage( listener.getErrors(), "error", reason );
- }
-
- private void assertHasMessage( Map map, String type, String message )
- {
- if ( ( map == null ) || ( map.isEmpty() ) )
- {
- fail( "No " + type + "s captured, expected " + type + " <" + message + ">" );
- }
-
- // Attempt to find the message ...
- for ( Iterator it = map.values().iterator(); it.hasNext(); )
- {
- List msgList = (List) it.next();
-
- if ( msgList.contains( message ) )
- {
- // Found it!
- return;
- }
- }
-
- // Didn't find it! whoops ...
- for ( Iterator it = map.entrySet().iterator(); it.hasNext(); )
- {
- Map.Entry entry = (Entry) it.next();
- String key = (String) entry.getKey();
- List msgList = (List) entry.getValue();
-
- System.err.println( " Artifact: " + key );
-
- for ( Iterator itMsgs = msgList.iterator(); itMsgs.hasNext(); )
- {
- String msg = (String) itMsgs.next();
- System.err.println( " " + msg );
- }
- }
-
- fail( "Unable to find " + type + " reason <" + message + "> in any artifact." );
- }
-
- private void createModernSourceRepository()
- throws Exception
- {
- ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
-
- ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
-
- File sourceBase = getTestFile( "src/test/source-modern-repository" );
- sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null,
- null );
+ File legacyRepoDir = new File( sourceRepository.getBasedir() );
+ File destRepoDir = new File( targetRepository.getUrl().getPath() );
+ List excludes = new ArrayList();
+ repositoryConverter.convertLegacyRepository( legacyRepoDir, destRepoDir, excludes );
}
}
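As a usage note, the whole-repository entry point exercised by testLegacyConversion above can be driven the same way outside a test. The sketch below is illustrative only: it assumes an initialised PlexusContainer reference named container, and the repository paths are placeholders.

// Hypothetical usage sketch; `container` is assumed to be an initialised PlexusContainer.
LegacyRepositoryConverter converter =
    (LegacyRepositoryConverter) container.lookup( LegacyRepositoryConverter.ROLE, "default" );

File legacyRepositoryDirectory = new File( "/path/to/legacy-repository" );
File repositoryDirectory = new File( "/path/to/default-layout-repository" );
List fileExclusionPatterns = new ArrayList(); // an empty list converts everything

converter.convertLegacyRepository( legacyRepositoryDirectory, repositoryDirectory, fileExclusionPatterns );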
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-import org.codehaus.plexus.PlexusTestCase;
-import org.codehaus.plexus.digest.Digester;
-
-/**
- *
- * @author <a href="mailto:carlos@apache.org">Carlos Sanchez</a>
- * @version $Id$
- */
-public abstract class AbstractFileEventTest
- extends PlexusTestCase
-{
- protected List digesters;
-
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- digesters = getContainer().lookupList( Digester.class.getName() );
- }
-
- protected void assertChecksumExists( File file, String algorithm )
- {
- assertChecksum( file, algorithm, true );
- }
-
- protected void assertChecksumDoesNotExist( File file, String algorithm )
- {
- assertChecksum( file, algorithm, false );
- }
-
- private void assertChecksum( File file, String algorithm, boolean exist )
- {
- String msg = exist ? "exists" : "does not exist";
- File checksumFile = new File( file.getPath() + "." + algorithm );
- assertEquals( "Test file " + algorithm + " checksum " + msg, exist, checksumFile.exists() );
- }
-
- protected void assertChecksumCommit( File file )
- throws IOException
- {
- assertChecksumExists( file, "md5" );
- assertChecksumExists( file, "sha1" );
- }
-
- protected void assertChecksumRollback( File file )
- throws IOException
- {
- assertChecksumDoesNotExist( file, "md5" );
- assertChecksumDoesNotExist( file, "sha1" );
- }
-}
\ No newline at end of file
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
-/**
- * AllTests
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class AllTests
-{
-
- public static Test suite()
- {
- TestSuite suite = new TestSuite( "Test for org.apache.maven.archiva.converter.transaction" );
- //$JUnit-BEGIN$
- suite.addTestSuite( CreateFileEventTest.class );
- suite.addTestSuite( CopyFileEventTest.class );
- //$JUnit-END$
- return suite;
- }
-
-}
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.commons.io.FileUtils;
-import org.codehaus.plexus.PlexusTestCase;
-
-/**
- * @author Edwin Punzalan
- */
-public class CopyFileEventTest
- extends AbstractFileEventTest
-{
- private File testDir = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/copy-file" );
-
- private File testDest = new File( testDir, "test-file.txt" );
-
- private File testSource = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/test-file.txt" );
-
- private File testDestChecksum;
-
- private String source, oldChecksum;
-
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- testSource.getParentFile().mkdirs();
-
- testSource.createNewFile();
-
- FileUtils.writeStringToFile( testSource, "source contents", null );
-
- testDestChecksum = new File( testDest.getPath() + ".sha1" );
-
- testDestChecksum.getParentFile().mkdirs();
-
- testDestChecksum.createNewFile();
-
- FileUtils.writeStringToFile( testDestChecksum, "this is the checksum", null );
-
- assertTrue( "Test if the source exists", testSource.exists() );
-
- assertTrue( "Test if the destination checksum exists", testDestChecksum.exists() );
-
- source = FileUtils.readFileToString( testSource, null );
-
- oldChecksum = FileUtils.readFileToString( testDestChecksum, null );
- }
-
- public void testCopyCommitRollback()
- throws Exception
- {
- CopyFileEvent event = new CopyFileEvent( testSource, testDest, digesters );
-
- assertFalse( "Test that the destination is not yet created", testDest.exists() );
-
- event.commit();
-
- assertTrue( "Test that the destination is created", testDest.exists() );
-
- assertChecksumCommit( testDest );
-
- String target = FileUtils.readFileToString( testDest, null );
-
- assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
-
- event.rollback();
-
- assertFalse( "Test that the destination file has been deleted", testDest.exists() );
-
- assertChecksumRollback( testDest );
- }
-
- public void testCopyCommitRollbackWithBackup()
- throws Exception
- {
- testDest.getParentFile().mkdirs();
-
- testDest.createNewFile();
-
- FileUtils.writeStringToFile( testDest, "overwritten contents", null );
-
- assertTrue( "Test that the destination exists", testDest.exists() );
-
- CopyFileEvent event = new CopyFileEvent( testSource, testDest, digesters );
-
- String target = FileUtils.readFileToString( testDest, null );
-
- assertTrue( "Test that the destination contents have not changed", target.equals( "overwritten contents" ) );
-
- event.commit();
-
- target = FileUtils.readFileToString( testDest, null );
-
- assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
-
- assertChecksumCommit( testDest );
-
- event.rollback();
-
- target = FileUtils.readFileToString( testDest, null );
-
- assertTrue( "Test the destination file contents have been restored", target.equals( "overwritten contents" ) );
-
- assertChecksumRollback( testDest );
- }
-
- public void testCreateRollbackCommit()
- throws Exception
- {
- CopyFileEvent event = new CopyFileEvent( testSource, testDest, digesters );
-
- assertFalse( "Test that the destination is not yet created", testDest.exists() );
-
- event.rollback();
-
- assertFalse( "Test that the destination file is not yet created", testDest.exists() );
-
- event.commit();
-
- assertTrue( "Test that the destination is created", testDest.exists() );
-
- assertChecksumCommit( testDest );
-
- String target = FileUtils.readFileToString( testDest, null );
-
- assertTrue( "Test that the destination contents are copied correctly", source.equals( target ) );
- }
-
- protected void tearDown()
- throws Exception
- {
- super.tearDown();
-
- FileUtils.deleteDirectory( new File( PlexusTestCase.getBasedir(), "target/transaction-tests" ) );
- }
-
- protected void assertChecksumCommit( File file )
- throws IOException
- {
- super.assertChecksumCommit( file );
-
- String target = FileUtils.readFileToString( testDestChecksum, null );
-
- assertFalse( "Test that the destination checksum contents are created correctly", oldChecksum.equals( target ) );
- }
-
- protected void assertChecksumRollback( File file )
- throws IOException
- {
- assertChecksumDoesNotExist( file, "md5" );
- assertChecksumExists( file, "sha1" );
-
- String target = FileUtils.readFileToString( testDestChecksum, null );
-
- assertEquals( "Test that the destination checksum contents are reverted correctly", oldChecksum, target );
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.converter.transaction;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-
-import org.apache.commons.io.FileUtils;
-import org.codehaus.plexus.PlexusTestCase;
-
-/**
- * @author Edwin Punzalan
- */
-public class CreateFileEventTest
- extends AbstractFileEventTest
-{
- private File testDir = new File( PlexusTestCase.getBasedir(), "target/transaction-tests/create-file" );
-
- public void testCreateCommitRollback()
- throws Exception
- {
- File testFile = new File( testDir, "test-file.txt" );
-
- CreateFileEvent event = new CreateFileEvent( "file contents", testFile, digesters );
-
- assertFalse( "Test file is not yet created", testFile.exists() );
-
- event.commit();
-
- assertTrue( "Test file has been created", testFile.exists() );
-
- assertChecksumCommit( testFile );
-
- event.rollback();
-
- assertFalse( "Test file is has been deleted after rollback", testFile.exists() );
-
- assertChecksumRollback( testFile );
-
- assertFalse( "Test file parent directories has been rolledback too", testDir.exists() );
- assertTrue( "target directory still exists", new File( PlexusTestCase.getBasedir(), "target" ).exists() );
- }
-
- public void testCreateCommitRollbackWithBackup()
- throws Exception
- {
- File testFile = new File( testDir, "test-file.txt" );
-
- testFile.getParentFile().mkdirs();
-
- testFile.createNewFile();
-
- FileUtils.writeStringToFile( testFile, "original contents", null );
-
- CreateFileEvent event = new CreateFileEvent( "modified contents", testFile, digesters );
-
- String contents = FileUtils.readFileToString( testFile, null );
-
- assertEquals( "Test contents have not changed", "original contents", contents );
-
- event.commit();
-
- contents = FileUtils.readFileToString( testFile, null );
-
- assertEquals( "Test contents have not changed", "modified contents", contents );
-
- assertChecksumCommit( testFile );
-
- event.rollback();
-
- contents = FileUtils.readFileToString( testFile, null );
-
- assertEquals( "Test contents have not changed", "original contents", contents );
-
- assertChecksumRollback( testFile );
- }
-
- public void testCreateRollbackCommit()
- throws Exception
- {
- File testFile = new File( testDir, "test-file.txt" );
-
- CreateFileEvent event = new CreateFileEvent( "file contents", testFile, digesters );
-
- assertFalse( "Test file is not yet created", testFile.exists() );
-
- event.rollback();
-
- assertFalse( "Test file is not yet created", testFile.exists() );
-
- event.commit();
-
- assertTrue( "Test file is not yet created", testFile.exists() );
-
- assertChecksumCommit( testFile );
- }
-
- protected void tearDown()
- throws Exception
- {
- super.tearDown();
-
- FileUtils.deleteDirectory( new File( PlexusTestCase.getBasedir(), "target/transaction-tests" ) );
- }
-}
<module>archiva-repository-layer</module>
<module>archiva-xml-tools</module>
<module>archiva-proxy</module>
- <!-- DOES NOT COMPILE (yet)
- <module>archiva-converter</module>
- -->
+ <module>archiva-converter</module>
</modules>
</project>
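Note: with the archiva-converter module re-enabled above, callers use the three-argument convertLegacyRepository form that the last hunk of this patch switches to. The sketch below is an illustration only and is not part of the patch: the class name, the package paths in the imports, the empty exclusion list, and the exception handling are assumptions, and in Archiva the converter would normally be injected as a Plexus component rather than passed in by hand.

import java.io.File;
import java.util.Collections;
import java.util.List;

// Assumed package locations for the converter API and its exception type.
import org.apache.maven.archiva.converter.RepositoryConversionException;
import org.apache.maven.archiva.converter.legacy.LegacyRepositoryConverter;

/**
 * Hypothetical caller sketch (not part of this patch). Assumes the updated
 * API that takes only the source repository, the target repository and the
 * file exclusion patterns, without the former trailing boolean flag.
 */
public class LegacyConversionSketch
{
    public void convert( LegacyRepositoryConverter legacyRepositoryConverter,
                         File oldRepositoryPath, File newRepositoryPath )
        throws RepositoryConversionException
    {
        // Assumed for the sketch: no files are excluded from the conversion.
        List fileExclusionPatterns = Collections.EMPTY_LIST;

        // Three-argument form, matching the call updated in the final hunk below.
        legacyRepositoryConverter.convertLegacyRepository( oldRepositoryPath, newRepositoryPath,
                                                           fileExclusionPatterns );
    }
}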
<dependencies>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-converter</artifactId>
+ <artifactId>archiva-repository-layer</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-repository-layer</artifactId>
+ <artifactId>archiva-core-consumers</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-core-consumers</artifactId>
+ <artifactId>archiva-converter</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
try
{
legacyRepositoryConverter.convertLegacyRepository( oldRepositoryPath, newRepositoryPath,
- fileExclusionPatterns, true );
+ fileExclusionPatterns );
}
catch ( RepositoryConversionException e )
{