import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.codehaus.plexus.cache.Cache;
import java.util.List;
import java.io.File;
* @plexus.requirement role-hint="jdo"
*/
private ProjectModelDAO projectModelDAO;
-
+
/**
* @plexus.requirement
*/
private RepositoryContentFactory repositoryFactory;
+ /**
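+     * Cache of effective project models, keyed by groupId:artifactId:version (see toProjectKey).
+     *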
+ * @plexus.requirement role-hint="effective-project-cache"
+ */
+ private Cache effectiveProjectCache;
+
public void beginScan()
{
// TODO Auto-generated method stub
}
public List<String> getIncludedTypes()
- {
- return null;
+ {
+ return null;
}
public void processArchivaArtifact( ArchivaArtifact artifact )
throws ConsumerException
- {
- if ( !StringUtils.equals( "pom", artifact.getType() ) )
+ {
+ if ( !StringUtils.equals( "pom", artifact.getType() ) )
{
- // Not a pom. Skip it.
+ // Not a pom. Skip it.
return;
}
-
- try
- {
- ManagedRepositoryContent repositoryContent =
- repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
-
- File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
-
- if( !file.exists() )
- {
- ArchivaProjectModel projectModel = projectModelDAO.getProjectModel(
- artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
-
- projectModelDAO.deleteProjectModel( projectModel );
- }
- }
- catch ( RepositoryException re )
- {
- throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " +
- re.getMessage() );
- }
- catch ( ArchivaDatabaseException e )
- {
+
+ try
+ {
+ ManagedRepositoryContent repositoryContent =
+ repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
+
+ File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
+
+ if ( !file.exists() )
+ {
+ ArchivaProjectModel projectModel =
+ projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
+ artifact.getVersion() );
+
+ projectModelDAO.deleteProjectModel( projectModel );
+
+ // Force removal of project model from effective cache
+ String projectKey = toProjectKey( projectModel );
+ synchronized ( effectiveProjectCache )
+ {
+ if ( effectiveProjectCache.hasKey( projectKey ) )
+ {
+ effectiveProjectCache.remove( projectKey );
+ }
+ }
+ }
+ }
+ catch ( RepositoryException re )
+ {
+            throw new ConsumerException( "Can't run database cleanup remove project consumer: " + re.getMessage() );
+ }
+ catch ( ArchivaDatabaseException e )
+ {
throw new ConsumerException( e.getMessage() );
- }
-
+ }
+
}
public String getDescription()
public boolean isPermanent()
{
return false;
- }
-
+ }
+
public void setProjectModelDAO( ProjectModelDAO projectModelDAO )
{
this.projectModelDAO = projectModelDAO;
}
-
+
public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
{
this.repositoryFactory = repositoryFactory;
}
+ public void setEffectiveProjectCache( Cache effectiveProjectCache )
+ {
+ this.effectiveProjectCache = effectiveProjectCache;
+ }
+
+ private String toProjectKey( ArchivaProjectModel project )
+ {
+ StringBuilder key = new StringBuilder();
+
+ key.append( project.getGroupId() ).append( ":" );
+ key.append( project.getArtifactId() ).append( ":" );
+ key.append( project.getVersion() );
+
+ return key.toString();
+ }
}
import org.apache.maven.archiva.repository.project.ProjectModelReader;
import org.apache.maven.archiva.repository.project.filters.EffectiveProjectModelFilter;
+import org.codehaus.plexus.cache.Cache;
+
import java.io.File;
import java.util.ArrayList;
import java.util.List;
private List<String> includes;
+ /**
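+     * Cache of effective project models, keyed by groupId:artifactId:version; the stale entry is evicted in
+     * removeOldProjectModel() when a POM is re-read into the database.
+     *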
+ * @plexus.requirement role-hint="effective-project-cache"
+ */
+ private Cache effectiveProjectCache;
+
public ProjectModelToDatabaseConsumer()
{
includes = new ArrayList<String>();
// Not a pom. Skip it.
return;
}
-
- if ( hasProjectModelInDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() ) )
+
+        // remove the old project model if it already exists in the database
+        ArchivaProjectModel model =
+            getProjectModelFromDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
+
+        if ( model != null )
{
- // Already in the database. Skip it.
- return;
+ removeOldProjectModel( model );
+ model = null;
}
ManagedRepositoryContent repo = getRepository( artifact );
try
{
- ArchivaProjectModel model = reader.read( artifactFile );
+ model = reader.read( artifactFile );
model.setOrigin( "filesystem" );
-
+
// The version should be updated to the artifact/filename version if it is a unique snapshot
if ( VersionUtil.isUniqueSnapshot( artifact.getVersion() ) )
{
if ( isValidModel( model, repo, artifact ) )
{
getLogger().debug( "Adding project model to database - " + Keys.toKey( model ) );
+
dao.getProjectModelDAO().saveProjectModel( model );
}
else
}
}
- private boolean hasProjectModelInDatabase( String groupId, String artifactId, String version )
+ private ArchivaProjectModel getProjectModelFromDatabase( String groupId, String artifactId, String version )
{
try
{
ArchivaProjectModel model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
- return ( model != null );
+ return model;
}
catch ( ObjectNotFoundException e )
{
- return false;
+ return null;
}
catch ( ArchivaDatabaseException e )
{
- return false;
+ return null;
}
}
appendModel( emsg, model );
emsg.append( "]: The model artifactId [" ).append( model.getArtifactId() );
emsg.append( "] does not match the artifactId portion of the filename: " ).append( artifact.getArtifactId() );
-
+
getLogger().warn(emsg.toString() );
addProblem( artifact, emsg.toString() );
appendModel( emsg, model );
emsg.append( "]; The model version [" ).append( model.getVersion() );
emsg.append( "] does not match the version portion of the filename: " ).append( artifact.getVersion() );
-
+
getLogger().warn(emsg.toString() );
addProblem( artifact, emsg.toString() );
throws ConsumerException
{
ManagedRepositoryContent repo = getRepository( artifact );
-
+
RepositoryProblem problem = new RepositoryProblem();
problem.setRepositoryId( artifact.getModel().getRepositoryId() );
problem.setPath( repo.toPath( artifact ) );
}
}
+ private String toProjectKey( ArchivaProjectModel project )
+ {
+ StringBuilder key = new StringBuilder();
+
+ key.append( project.getGroupId() ).append( ":" );
+ key.append( project.getArtifactId() ).append( ":" );
+ key.append( project.getVersion() );
+
+ return key.toString();
+ }
+
+ private void removeOldProjectModel( ArchivaProjectModel model )
+ {
+ try
+ {
+ dao.getProjectModelDAO().deleteProjectModel( model );
+ }
+ catch ( ArchivaDatabaseException ae )
+ {
+            getLogger().error( "Unable to delete existing project model: " + Keys.toKey( model ), ae );
+ }
+
+ // Force removal of project model from effective cache
+ String projectKey = toProjectKey( model );
+ synchronized ( effectiveProjectCache )
+ {
+ if ( effectiveProjectCache.hasKey( projectKey ) )
+ {
+ effectiveProjectCache.remove( projectKey );
+ }
+ }
+ }
+
}
* under the License.
*/
+import org.codehaus.plexus.cache.Cache;
import org.easymock.MockControl;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
private ProjectModelDAO projectModelDAOMock;
private DatabaseCleanupRemoveProjectConsumer dbCleanupRemoveProjectConsumer;
+
+ private Cache effectiveProjectCache;
public void setUp()
throws Exception
projectModelDAOControl = MockControl.createControl( ProjectModelDAO.class );
- projectModelDAOMock = (ProjectModelDAO) projectModelDAOControl.getMock();
+ projectModelDAOMock = (ProjectModelDAO) projectModelDAOControl.getMock();
+
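+        // use a real cache instance from the test container (declared in the test components descriptor) instead of a mock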
+ effectiveProjectCache = (Cache) lookup( Cache.class, "effective-project-cache" );
dbCleanupRemoveProjectConsumer.setProjectModelDAO( projectModelDAOMock );
dbCleanupRemoveProjectConsumer.setRepositoryFactory( repositoryFactory );
+
+ dbCleanupRemoveProjectConsumer.setEffectiveProjectCache( effectiveProjectCache );
}
public void testIfArtifactWasNotDeleted()
</requirements>
</component>
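+
+    <!-- EhCache instance backing the "effective-project-cache" requirement declared by the database consumers -->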
+    <component>
+      <role>org.codehaus.plexus.cache.Cache</role>
+      <role-hint>effective-project-cache</role-hint>
+      <implementation>org.codehaus.plexus.cache.ehcache.EhcacheCache</implementation>
+      <description>Effective Project Cache</description>
+      <configuration>
+        <disk-expiry-thread-interval-seconds>600</disk-expiry-thread-interval-seconds>
+        <disk-persistent>true</disk-persistent>
+        <disk-store-path>${java.io.tmpdir}/archiva/effectiveproject</disk-store-path>
+        <eternal>true</eternal>
+        <max-elements-in-memory>1000</max-elements-in-memory>
+        <memory-eviction-policy>LRU</memory-eviction-policy>
+        <name>effective-project-cache</name>
+        <overflow-to-disk>false</overflow-to-disk>
+        <!-- TODO: Adjust the time-to-live to be more sane (i.e. much longer, 4+ hours) -->
+        <!-- 45 minutes = 2700 seconds -->
+        <time-to-idle-seconds>2700</time-to-idle-seconds>
+        <!-- 30 minutes = 1800 seconds -->
+        <time-to-live-seconds>1800</time-to-live-seconds>
+      </configuration>
+    </component>
</components>
</component-set>