implement mergeRepositories method in MergeRepositoriesService

git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@1387390 13f79535-47bb-0310-9956-ffa450edef68
tags/archiva-1.4-M3
Olivier Lamy 11 years ago
commit d1d92da751

+ 12  - 0   archiva-modules/archiva-web/archiva-rest/archiva-rest-api/src/main/java/org/apache/archiva/rest/api/services/MergeRepositoriesService.java

@@ -48,4 +48,16 @@ public interface MergeRepositoriesService
List<Artifact> getMergeConflictedArtifacts( @PathParam ( "sourceRepositoryId" ) String sourceRepositoryId,
@PathParam ( "targetRepositoryId" ) String targetRepositoryId )
throws ArchivaRestServiceException;

@Path ( "mergeRepositories/{sourceRepositoryId}/{targetRepositoryId}/{skipConflicts}" )
@GET
@RedbackAuthorization ( noPermission = true )
/**
* <b>permissions are checked in impl</b>
* @since 1.4-M3
*/
void mergeRepositories( @PathParam ( "sourceRepositoryId" ) String sourceRepositoryId,
@PathParam ( "targetRepositoryId" ) String targetRepositoryId,
@PathParam ( "skipConflicts" ) boolean skipConflicts )
throws ArchivaRestServiceException;
}

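The new GET endpoint is mapped to mergeRepositories/{sourceRepositoryId}/{targetRepositoryId}/{skipConflicts}, with permissions checked in the implementation. A minimal client sketch, mirroring the CXF proxy setup that getMergeRepositoriesService() uses in AbstractArchivaRestTest further down; baseUrl, restServicesPath and authzHeader are placeholders, not part of this commit:

// Build a JAX-RS proxy for the service, same pattern as the test helper below.
MergeRepositoriesService service =
    JAXRSClientFactory.create( baseUrl + "/" + restServicesPath + "/archivaServices/",
                               MergeRepositoriesService.class,
                               Collections.singletonList( new JacksonJaxbJsonProvider() ) );
WebClient.client( service ).header( "Authorization", authzHeader );
WebClient.client( service ).accept( MediaType.APPLICATION_JSON_TYPE );

// GET .../mergeRepositories/test-repository-stage/test-repository/true
// merges the stage repository into its target, skipping conflicting artifacts.
service.mergeRepositories( "test-repository-stage", "test-repository", true );
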
+ 29  - 0   archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/AbstractRestService.java

@@ -29,6 +29,7 @@ import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.maven2.model.Artifact;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
import org.apache.archiva.redback.rest.services.RedbackAuthenticationThreadLocal;
import org.apache.archiva.redback.rest.services.RedbackRequestInformation;
import org.apache.archiva.redback.users.User;
@@ -37,6 +38,8 @@ import org.apache.archiva.repository.RepositoryContentFactory;
import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.rest.api.services.ArchivaRestServiceException;
import org.apache.archiva.rest.services.utils.ArtifactBuilder;
import org.apache.archiva.scheduler.repository.RepositoryArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.RepositoryTask;
import org.apache.archiva.security.AccessDeniedException;
import org.apache.archiva.security.ArchivaSecurityException;
import org.apache.archiva.security.PrincipalNotFoundException;
@@ -88,6 +91,10 @@ public abstract class AbstractRestService
@Inject
protected RepositoryContentFactory repositoryContentFactory;

@Inject
@Named ( value = "archivaTaskScheduler#repository" )
protected RepositoryArchivaTaskScheduler repositoryTaskScheduler;

@Context
protected HttpServletRequest httpServletRequest;

@@ -272,4 +279,26 @@ public abstract class AbstractRestService
Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e );
}
}

protected Boolean doScanRepository( String repositoryId, boolean fullScan )
{
if ( repositoryTaskScheduler.isProcessingRepositoryTask( repositoryId ) )
{
log.info( "scanning of repository with id {} already scheduled", repositoryId );
return Boolean.FALSE;
}
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
task.setScanAll( fullScan );
try
{
repositoryTaskScheduler.queueTask( task );
}
catch ( TaskQueueException e )
{
log.error( "failed to schedule scanning of repo with id {}", repositoryId, e );
return false;
}
return true;
}
}

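doScanRepository is the scheduling logic pulled up from DefaultRepositoriesService (see the hunk for that class below): it returns Boolean.FALSE when a scan of the repository is already queued, otherwise it queues a RepositoryTask with scanAll taken from the fullScan flag. A short usage sketch from a subclass; the repository id is illustrative:

// incremental scan, as DefaultMergeRepositoriesService now triggers after a merge
doScanRepository( "test-repository", false );

// full scan: the queued RepositoryTask is created with scanAll = true
doScanRepository( "test-repository", true );
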
+ 148  - 1   archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultMergeRepositoriesService.java

@@ -18,16 +18,26 @@ package org.apache.archiva.rest.services;
* under the License.
*/

import org.apache.archiva.admin.model.RepositoryAdminException;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.common.utils.VersionUtil;
import org.apache.archiva.maven2.model.Artifact;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
import org.apache.archiva.rest.api.services.ArchivaRestServiceException;
import org.apache.archiva.rest.api.services.MergeRepositoriesService;
import org.apache.archiva.stagerepository.merge.Maven2RepositoryMerger;
import org.apache.archiva.stagerepository.merge.RepositoryMergerException;
import org.springframework.stereotype.Service;

import javax.inject.Inject;
import javax.inject.Named;
import java.util.ArrayList;
import java.util.List;

/**
@@ -57,7 +67,7 @@ public class DefaultMergeRepositoriesService

return buildArtifacts( artifactMetadatas, sourceRepositoryId );
}
catch ( Exception e )
catch ( RepositoryMergerException e )
{
throw new ArchivaRestServiceException( e.getMessage(), e );
}
@@ -65,6 +75,143 @@ public class DefaultMergeRepositoriesService
{
repositorySession.close();
}
}

public void mergeRepositories( String sourceRepositoryId, String targetRepositoryId, boolean skipConflicts )
throws ArchivaRestServiceException
{
try
{
if ( skipConflicts )
{
mergeBySkippingConflicts( sourceRepositoryId, targetRepositoryId );
}
else
{
doMerge( sourceRepositoryId, targetRepositoryId );
}

}
catch ( RepositoryMergerException e )
{
throw new ArchivaRestServiceException( e.getMessage(), e );
}

}


protected void doMerge( String sourceRepositoryId, String targetRepositoryId )
throws RepositoryMergerException, ArchivaRestServiceException
{
RepositorySession repositorySession = repositorySessionFactory.createSession();

try
{
ManagedRepository repository = managedRepositoryAdmin.getManagedRepository( targetRepositoryId );
MetadataRepository metadataRepository = repositorySession.getRepository();
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepositoryId );

if ( repository.isReleases() && !repository.isSnapshots() )
{
mergeWithOutSnapshots( metadataRepository, sourceArtifacts, sourceRepositoryId, targetRepositoryId );
}
else
{
repositoryMerger.merge( metadataRepository, sourceRepositoryId, targetRepositoryId );

for ( ArtifactMetadata metadata : sourceArtifacts )
{
triggerAuditEvent( targetRepositoryId, metadata.getId(), AuditEvent.MERGING_REPOSITORIES );
}
}

doScanRepository( targetRepositoryId, false );
}
catch ( MetadataRepositoryException e )
{
throw new ArchivaRestServiceException( e.getMessage(), e );
}
catch ( RepositoryAdminException e )
{
throw new ArchivaRestServiceException( e.getMessage(), e );
}
finally
{
repositorySession.close();
}
}

public void mergeBySkippingConflicts( String sourceRepositoryId, String targetRepositoryId )
throws RepositoryMergerException, ArchivaRestServiceException
{

RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
List<ArtifactMetadata> conflictSourceArtifacts =
repositoryMerger.getConflictingArtifacts( repositorySession.getRepository(), sourceRepositoryId,
targetRepositoryId );
MetadataRepository metadataRepository = repositorySession.getRepository();
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepositoryId );
sourceArtifacts.removeAll( conflictSourceArtifacts );

ManagedRepository repository = managedRepositoryAdmin.getManagedRepository( targetRepositoryId );

if ( repository.isReleases() && !repository.isSnapshots() )
{
mergeWithOutSnapshots( metadataRepository, sourceArtifacts, sourceRepositoryId, targetRepositoryId );
}
else
{

Filter<ArtifactMetadata> artifactsWithOutConflicts =
new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
repositoryMerger.merge( metadataRepository, sourceRepositoryId, targetRepositoryId,
artifactsWithOutConflicts );
for ( ArtifactMetadata metadata : sourceArtifacts )
{
triggerAuditEvent( targetRepositoryId, metadata.getId(), AuditEvent.MERGING_REPOSITORIES );
}
}

doScanRepository( targetRepositoryId, false );
}
catch ( MetadataRepositoryException e )
{
throw new ArchivaRestServiceException( e.getMessage(), e );
}
catch ( RepositoryAdminException e )
{
throw new ArchivaRestServiceException( e.getMessage(), e );

}
finally
{
repositorySession.close();
}
}

private void mergeWithOutSnapshots( MetadataRepository metadataRepository, List<ArtifactMetadata> sourceArtifacts,
String sourceRepoId, String repoid )
throws RepositoryMergerException
{
List<ArtifactMetadata> artifactsWithOutSnapshots = new ArrayList<ArtifactMetadata>();
for ( ArtifactMetadata metadata : sourceArtifacts )
{
if ( VersionUtil.isSnapshot( metadata.getProjectVersion() ) )
//if ( metadata.getProjectVersion().contains( VersionUtil.SNAPSHOT ) )
{
artifactsWithOutSnapshots.add( metadata );
}
else
{
triggerAuditEvent( repoid, metadata.getId(), AuditEvent.MERGING_REPOSITORIES );
}

}
sourceArtifacts.removeAll( artifactsWithOutSnapshots );

Filter<ArtifactMetadata> artifactListWithOutSnapShots = new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactListWithOutSnapShots );
}
}

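One detail worth noting in the implementation above: the snapshot check in mergeWithOutSnapshots uses VersionUtil.isSnapshot( version ) instead of the version.contains( VersionUtil.SNAPSHOT ) test that MergeAction (further down) still carries. Assuming VersionUtil.isSnapshot also recognizes timestamped snapshot versions (an assumption about archiva-common, not shown in this commit), the difference looks like this:

VersionUtil.isSnapshot( "1.0-SNAPSHOT" );                  // true
VersionUtil.isSnapshot( "1.0-20120925.123456-1" );         // assumed true: timestamped snapshot
"1.0-20120925.123456-1".contains( VersionUtil.SNAPSHOT );  // false, so the contains() check would let it through
VersionUtil.isSnapshot( "1.0" );                           // false: plain releases are merged as before
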
+ 1  - 24   archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultRepositoriesService.java

@@ -70,8 +70,6 @@ import org.apache.archiva.scheduler.indexing.ArchivaIndexingTaskExecutor;
import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
import org.apache.archiva.scheduler.indexing.DownloadRemoteIndexException;
import org.apache.archiva.scheduler.indexing.DownloadRemoteIndexScheduler;
import org.apache.archiva.scheduler.repository.ArchivaRepositoryScanningTaskExecutor;
import org.apache.archiva.scheduler.repository.RepositoryArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.RepositoryTask;
import org.apache.archiva.security.ArchivaSecurityException;
import org.apache.archiva.security.common.ArchivaRoleConstants;
@@ -113,10 +111,6 @@ public class DefaultRepositoriesService
{
private Logger log = LoggerFactory.getLogger( getClass() );

@Inject
@Named ( value = "archivaTaskScheduler#repository" )
private RepositoryArchivaTaskScheduler repositoryTaskScheduler;

@Inject
@Named ( value = "taskExecutor#indexing" )
private ArchivaIndexingTaskExecutor archivaIndexingTaskExecutor;
@@ -157,24 +151,7 @@ public class DefaultRepositoriesService

public Boolean scanRepository( String repositoryId, boolean fullScan )
{
if ( repositoryTaskScheduler.isProcessingRepositoryTask( repositoryId ) )
{
log.info( "scanning of repository with id {} already scheduled", repositoryId );
return Boolean.FALSE;
}
RepositoryTask task = new RepositoryTask();
task.setRepositoryId( repositoryId );
task.setScanAll( fullScan );
try
{
repositoryTaskScheduler.queueTask( task );
}
catch ( TaskQueueException e )
{
log.error( "failed to schedule scanning of repo with id {}", repositoryId, e );
return false;
}
return true;
return doScanRepository( repositoryId, fullScan );
}

public Boolean alreadyScanning( String repositoryId )

+ 13  - 7   archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/AbstractArchivaRestTest.java

@@ -55,7 +55,7 @@ import java.util.Date;
/**
* @author Olivier Lamy
*/
@RunWith ( ArchivaBlockJUnit4ClassRunner.class )
@RunWith (ArchivaBlockJUnit4ClassRunner.class)
public abstract class AbstractArchivaRestTest
extends AbstractRestServicesTest
{
@@ -123,16 +123,16 @@ public abstract class AbstractArchivaRestTest
return getRepositoriesService( null );
}

protected MergeRepositoriesService getMergeRepositoriesService()
protected MergeRepositoriesService getMergeRepositoriesService( String authzHeader )
{
MergeRepositoriesService service =
JAXRSClientFactory.create( getBaseUrl() + "/" + getRestServicesPath() + "/archivaServices/",
MergeRepositoriesService.class,
Collections.singletonList( new JacksonJaxbJsonProvider() ) );

if ( authorizationHeader != null )
if ( authzHeader != null )
{
WebClient.client( service ).header( "Authorization", authorizationHeader );
WebClient.client( service ).header( "Authorization", authzHeader );
}
WebClient.getConfig( service ).getHttpConduit().getClient().setReceiveTimeout( 100000000 );
WebClient.client( service ).accept( MediaType.APPLICATION_JSON_TYPE );
@@ -481,11 +481,17 @@ public abstract class AbstractArchivaRestTest
protected void deleteTestRepo( String id )
throws Exception
{
if ( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( id ) != null )
try
{
getManagedRepositoriesService( authorizationHeader ).deleteManagedRepository( id, false );
if ( getManagedRepositoriesService( authorizationHeader ).getManagedRepository( id ) != null )
{
getManagedRepositoriesService( authorizationHeader ).deleteManagedRepository( id, false );
}
}
catch ( Exception e )
{
log.warn( "skip error deleting repo {}", id, e );
}

}

public String getBasedir()

+ 52  - 16   archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/MergeRepositoriesServiceTest.java

@@ -19,7 +19,6 @@ package org.apache.archiva.rest.services;
*/

import org.apache.archiva.maven2.model.Artifact;
import org.apache.archiva.rest.api.services.BrowseService;
import org.apache.archiva.rest.api.services.MergeRepositoriesService;
import org.apache.commons.io.FileUtils;
import org.fest.assertions.api.Assertions;
@@ -37,16 +36,19 @@ public class MergeRepositoriesServiceTest
extends AbstractArchivaRestTest
{

private File repo = new File( System.getProperty( "builddir" ), "test-repository" );

private File repoStage = new File( System.getProperty( "builddir" ), "test-repository-stage" );

@Override
@Before
public void startServer()
throws Exception
{

FileUtils.copyDirectory( new File( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ),
new File( System.getProperty( "builddir" ), "test-repository" ) );
FileUtils.copyDirectory( new File( System.getProperty( "basedir" ), "src/test/repo-with-osgi" ), repo );
FileUtils.copyDirectory( new File( System.getProperty( "basedir" ), "src/test/repo-with-osgi-stage" ),
new File( System.getProperty( "builddir" ), "test-repository-stage" ) );
repoStage );
super.startServer();

}
@@ -58,27 +60,21 @@ public class MergeRepositoriesServiceTest
{
// TODO delete repositories
super.stopServer();
FileUtils.deleteDirectory( new File( System.getProperty( "builddir" ), "test-repository" ) );
FileUtils.deleteDirectory( new File( System.getProperty( "builddir" ), "test-repository-stage" ) );
FileUtils.deleteDirectory( repo );
FileUtils.deleteDirectory( repoStage );
}

@Test
public void mergeConflictedArtifacts()
public void getMergeConflictedArtifacts()
throws Exception
{
String testRepoId = "test-repository";
try
{
String testRepoId = "test-repository";
createStagedNeededRepo( testRepoId,
new File( System.getProperty( "builddir" ), "test-repository" ).getAbsolutePath(),
true );

// force jcr data population !
BrowseService browseService = getBrowseService( authorizationHeader, false );
browseService.getRootGroups( testRepoId );
browseService.getRootGroups( testRepoId + "-stage" );
createStagedNeededRepo( testRepoId, repo.getAbsolutePath(), true );

MergeRepositoriesService service = getMergeRepositoriesService();
MergeRepositoriesService service = getMergeRepositoriesService( authorizationHeader );

List<Artifact> artifactMetadatas = service.getMergeConflictedArtifacts( testRepoId + "-stage", testRepoId );

@@ -86,12 +82,52 @@ public class MergeRepositoriesServiceTest

Assertions.assertThat( artifactMetadatas ).isNotNull().isNotEmpty().hasSize( 8 );


}
catch ( Exception e )
{
log.error( e.getMessage(), e );
throw e;
}
finally
{
deleteTestRepo( testRepoId );
}
}

@Test
public void merge()
throws Exception
{
String testRepoId = "test-repository";
try
{
createStagedNeededRepo( testRepoId, repo.getAbsolutePath(), true );

String mergedArtifactPath =
"org/apache/felix/org.apache.felix.bundlerepository/1.6.4/org.apache.felix.bundlerepository-1.6.4.jar";
String mergedArtifactPomPath =
"org/apache/felix/org.apache.felix.bundlerepository/1.6.4/org.apache.felix.bundlerepository-1.6.4.pom";

assertTrue( new File( repoStage, mergedArtifactPath ).exists() );
assertTrue( new File( repoStage, mergedArtifactPomPath ).exists() );

MergeRepositoriesService service = getMergeRepositoriesService( authorizationHeader );

service.mergeRepositories( testRepoId + "-stage", testRepoId, true );

assertTrue( new File( repo, mergedArtifactPath ).exists() );
assertTrue( new File( repo, mergedArtifactPomPath ).exists() );

}
catch ( Exception e )
{
log.error( e.getMessage(), e );
throw e;
}
finally
{
deleteTestRepo( testRepoId );
}
}
}

+ 24  - 23   archiva-modules/archiva-web/archiva-webapp/src/main/java/org/apache/archiva/web/action/MergeAction.java

@@ -186,6 +186,30 @@ public class MergeAction
}
}

private void mergeWithOutSnapshots( MetadataRepository metadataRepository, List<ArtifactMetadata> sourceArtifacts,
String sourceRepoId, String repoid )
throws Exception
{
List<ArtifactMetadata> artifactsWithOutSnapshots = new ArrayList<ArtifactMetadata>();
for ( ArtifactMetadata metadata : sourceArtifacts )
{

if ( metadata.getProjectVersion().contains( VersionUtil.SNAPSHOT ) )
{
artifactsWithOutSnapshots.add( metadata );
}
else
{
triggerAuditEvent( repoid, metadata.getId(), AuditEvent.MERGING_REPOSITORIES );
}

}
sourceArtifacts.removeAll( artifactsWithOutSnapshots );

Filter<ArtifactMetadata> artifactListWithOutSnapShots = new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactListWithOutSnapShots );
}

public String mergeWithOutConlficts()
{
sourceRepoId = repoid + "-stage";
@@ -289,29 +313,6 @@ public class MergeAction
conflictSourceArtifactsToBeDisplayed.addAll( map.values() );
}

private void mergeWithOutSnapshots( MetadataRepository metadataRepository, List<ArtifactMetadata> sourceArtifacts,
String sourceRepoId, String repoid )
throws Exception
{
List<ArtifactMetadata> artifactsWithOutSnapshots = new ArrayList<ArtifactMetadata>();
for ( ArtifactMetadata metadata : sourceArtifacts )
{

if ( metadata.getProjectVersion().contains( VersionUtil.SNAPSHOT ) )
{
artifactsWithOutSnapshots.add( metadata );
}
else
{
triggerAuditEvent( repoid, metadata.getId(), AuditEvent.MERGING_REPOSITORIES );
}

}
sourceArtifacts.removeAll( artifactsWithOutSnapshots );

Filter<ArtifactMetadata> artifactListWithOutSnapShots = new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactListWithOutSnapShots );
}

private void scanRepository()
{

+ 3  - 15   archiva-modules/plugins/stage-repository-merge/src/main/java/org/apache/archiva/stagerepository/merge/Maven2RepositoryMerger.java

@@ -35,7 +35,7 @@ import org.apache.archiva.repository.RepositoryException;
import org.apache.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.archiva.xml.XMLException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
@@ -43,8 +43,6 @@ import org.springframework.stereotype.Service;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
@@ -278,19 +276,9 @@ public class Maven2RepositoryMerger
private void copyFile( File sourceFile, File targetFile )
throws IOException
{
FileOutputStream out = new FileOutputStream( targetFile );
FileInputStream input = new FileInputStream( sourceFile );

// IOUtils internally buffers the streams
try
{
IOUtils.copy( input, out );
}
finally
{
IOUtils.closeQuietly( out );
IOUtils.closeQuietly( input );
}
FileUtils.copyFile( sourceFile, targetFile );

}

private void updateProjectMetadata( File projectMetaDataFileIntargetRepo, ArtifactMetadata artifactMetadata,
