import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.codehaus.plexus.cache.Cache;
import java.util.List;
import java.io.File;
* @plexus.requirement role-hint="jdo"
*/
private ProjectModelDAO projectModelDAO;
-
+
/**
* @plexus.requirement
*/
private RepositoryContentFactory repositoryFactory;
+ /**
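+ * Cache of effective project models, keyed by groupId:artifactId:version (see toProjectKey).
+ *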
+ * @plexus.requirement role-hint="effective-project-cache"
+ */
+ private Cache effectiveProjectCache;
+
public void beginScan()
{
// TODO Auto-generated method stub
}
public List<String> getIncludedTypes()
- {
- return null;
+ {
+ return null;
}
public void processArchivaArtifact( ArchivaArtifact artifact )
throws ConsumerException
- {
- if ( !StringUtils.equals( "pom", artifact.getType() ) )
+ {
+ if ( !StringUtils.equals( "pom", artifact.getType() ) )
{
- // Not a pom. Skip it.
+ // Not a pom. Skip it.
return;
}
-
- try
- {
- ManagedRepositoryContent repositoryContent =
- repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
-
- File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
-
- if( !file.exists() )
- {
- ArchivaProjectModel projectModel = projectModelDAO.getProjectModel(
- artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
-
- projectModelDAO.deleteProjectModel( projectModel );
- }
- }
- catch ( RepositoryException re )
- {
- throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " +
- re.getMessage() );
- }
- catch ( ArchivaDatabaseException e )
- {
+
+ try
+ {
+ ManagedRepositoryContent repositoryContent =
+ repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
+
+ File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
+
+ if ( !file.exists() )
+ {
+ ArchivaProjectModel projectModel =
+ projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
+ artifact.getVersion() );
+
+ projectModelDAO.deleteProjectModel( projectModel );
+
+ // Force removal of project model from effective cache
+ String projectKey = toProjectKey( projectModel );
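+ // synchronize so the hasKey/remove pair runs atomically against concurrent consumers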
+ synchronized ( effectiveProjectCache )
+ {
+ if ( effectiveProjectCache.hasKey( projectKey ) )
+ {
+ effectiveProjectCache.remove( projectKey );
+ }
+ }
+ }
+ }
+ catch ( RepositoryException re )
+ {
+ throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " + re.getMessage(), re );
+ }
+ catch ( ArchivaDatabaseException e )
+ {
throw new ConsumerException( e.getMessage() );
- }
-
+ }
+
}
public String getDescription()
public boolean isPermanent()
{
return false;
- }
-
+ }
+
public void setProjectModelDAO( ProjectModelDAO projectModelDAO )
{
this.projectModelDAO = projectModelDAO;
}
-
+
public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
{
this.repositoryFactory = repositoryFactory;
}
+ public void setEffectiveProjectCache( Cache effectiveProjectCache )
+ {
+ this.effectiveProjectCache = effectiveProjectCache;
+ }
+
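+ /**
+ * Builds the effective-project cache key for a project model, in the form groupId:artifactId:version.
+ */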
+ private String toProjectKey( ArchivaProjectModel project )
+ {
+ StringBuilder key = new StringBuilder();
+
+ key.append( project.getGroupId() ).append( ":" );
+ key.append( project.getArtifactId() ).append( ":" );
+ key.append( project.getVersion() );
+
+ return key.toString();
+ }
}
import org.apache.maven.archiva.repository.project.ProjectModelFilter;
import org.apache.maven.archiva.repository.project.ProjectModelReader;
import org.apache.maven.archiva.repository.project.filters.EffectiveProjectModelFilter;
+import org.codehaus.plexus.cache.Cache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
implements DatabaseUnprocessedArtifactConsumer
{
private Logger log = LoggerFactory.getLogger( ProjectModelToDatabaseConsumer.class );
-
+
/**
* @plexus.configuration default-value="update-db-project"
*/
private List<String> includes;
+ /**
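+ * Cache of effective project models, keyed by groupId:artifactId:version (see toProjectKey).
+ *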
+ * @plexus.requirement role-hint="effective-project-cache"
+ */
+ private Cache effectiveProjectCache;
+
public ProjectModelToDatabaseConsumer()
{
includes = new ArrayList<String>();
// Not a pom. Skip it.
return;
}
-
- if ( hasProjectModelInDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() ) )
+
+ ArchivaProjectModel model = null;
+
+ // remove old project model if it already exists in the database
+ model = getProjectModelFromDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
+ if ( model != null )
{
- // Already in the database. Skip it.
- return;
+ removeOldProjectModel( model );
+ model = null;
}
ManagedRepositoryContent repo = getRepository( artifact );
try
{
- ArchivaProjectModel model = reader.read( artifactFile );
+ model = reader.read( artifactFile );
model.setOrigin( "filesystem" );
-
+
// The version should be updated to the artifact/filename version if it is a unique snapshot
if ( VersionUtil.isUniqueSnapshot( artifact.getVersion() ) )
{
if ( isValidModel( model, repo, artifact ) )
{
- log.debug( "Adding project model to database - " + Keys.toKey( model ) );
+ log.debug( "Adding project model to database - " + Keys.toKey( model ) );
dao.getProjectModelDAO().saveProjectModel( model );
}
else
{
- log.warn(
- "Invalid or corrupt pom. Project model not added to database - " + Keys.toKey( model ) );
+ log.warn( "Invalid or corrupt pom. Project model not added to database - " + Keys.toKey( model ) );
}
}
}
catch ( ArchivaDatabaseException e )
{
- log.warn( "Unable to save project model " + artifactFile + " to the database : " + e.getMessage(),
- e );
+ log.warn( "Unable to save project model " + artifactFile + " to the database : " + e.getMessage(), e );
}
catch ( Throwable t )
{
// Catch the other errors in the process to allow the rest of the process to complete.
- log.error( "Unable to process model " + artifactFile + " due to : " + t.getClass().getName() +
- " : " + t.getMessage(), t );
+ log.error( "Unable to process model " + artifactFile + " due to : " + t.getClass().getName() + " : " +
+ t.getMessage(), t );
}
}
- private boolean hasProjectModelInDatabase( String groupId, String artifactId, String version )
+ private ArchivaProjectModel getProjectModelFromDatabase( String groupId, String artifactId, String version )
{
try
{
ArchivaProjectModel model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
- return ( model != null );
+ return model;
}
catch ( ObjectNotFoundException e )
{
- return false;
+ return null;
}
catch ( ArchivaDatabaseException e )
{
- return false;
+ return null;
}
}
appendModel( emsg, model );
emsg.append( "]: The model artifactId [" ).append( model.getArtifactId() );
emsg.append( "] does not match the artifactId portion of the filename: " ).append( artifact.getArtifactId() );
-
- log.warn(emsg.toString() );
+
+ log.warn( emsg.toString() );
addProblem( artifact, emsg.toString() );
return false;
appendModel( emsg, model );
emsg.append( "]; The model version [" ).append( model.getVersion() );
emsg.append( "] does not match the version portion of the filename: " ).append( artifact.getVersion() );
-
- log.warn(emsg.toString() );
+
+ log.warn( emsg.toString() );
addProblem( artifact, emsg.toString() );
return false;
throws ConsumerException
{
ManagedRepositoryContent repo = getRepository( artifact );
-
+
RepositoryProblem problem = new RepositoryProblem();
problem.setRepositoryId( artifact.getModel().getRepositoryId() );
problem.setPath( repo.toPath( artifact ) );
}
}
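+ /**
+ * Builds the effective-project cache key for a project model, in the form groupId:artifactId:version.
+ */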
+ private String toProjectKey( ArchivaProjectModel project )
+ {
+ StringBuilder key = new StringBuilder();
+
+ key.append( project.getGroupId() ).append( ":" );
+ key.append( project.getArtifactId() ).append( ":" );
+ key.append( project.getVersion() );
+
+ return key.toString();
+ }
+
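+ /**
+ * Deletes an existing project model from the database and evicts its entry from the
+ * effective-project cache so a stale model is not reused when the POM is reprocessed.
+ */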
+ private void removeOldProjectModel( ArchivaProjectModel model )
+ {
+ try
+ {
+ dao.getProjectModelDAO().deleteProjectModel( model );
+ }
+ catch ( ArchivaDatabaseException ae )
+ {
+ log.error( "Unable to delete existing project model." );
+ }
+
+ // Force removal of project model from effective cache
+ String projectKey = toProjectKey( model );
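+ // synchronize so the hasKey/remove pair runs atomically against concurrent consumers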
+ synchronized ( effectiveProjectCache )
+ {
+ if ( effectiveProjectCache.hasKey( projectKey ) )
+ {
+ effectiveProjectCache.remove( projectKey );
+ }
+ }
+ }
+
}