<name>cleanupConsumers</name>
<version>1.0.0+</version>
<required>true</required>
+ <deprecated>true</deprecated>
<association>
<type>String</type>
<multiplicity>*</multiplicity>
+++ /dev/null
-package org.apache.maven.archiva.consumers.database;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
-import org.apache.maven.archiva.consumers.ConsumerException;
-import org.apache.maven.archiva.database.Constraint;
-import org.apache.maven.archiva.database.RepositoryProblemDAO;
-import org.apache.maven.archiva.database.constraints.RepositoryProblemByArtifactConstraint;
-import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.model.RepositoryProblem;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.RepositoryContentFactory;
-import org.apache.maven.archiva.repository.RepositoryException;
-
-import java.util.List;
-import java.io.File;
-
-/**
- * Consumer for cleaning up the database of artifacts that no longer exist in the repository.
- *
- * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- * @version $Id$
- *
- * @plexus.component role="org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer"
- * role-hint="not-present-remove-db-artifact"
- * instantiation-strategy="per-lookup"
- */
-public class DatabaseCleanupRemoveArtifactConsumer
- extends AbstractMonitoredConsumer
- implements DatabaseCleanupConsumer
-{
- /**
- * @plexus.configuration default-value="not-present-remove-db-artifact"
- */
- private String id;
-
- /**
- * @plexus.configuration default-value="Remove artifact from database if not present on filesystem."
- */
- private String description;
-
- /**
- * @plexus.requirement role-hint="jdo"
- */
- private ArtifactDAO artifactDAO;
-
- /**
- * @plexus.requirement role-hint="jdo"
- */
- private RepositoryProblemDAO repositoryProblemDAO;
-
- /**
- * @plexus.requirement
- */
- private RepositoryContentFactory repositoryFactory;
-
- public void beginScan()
- {
- // TODO Auto-generated method stub
-
- }
-
- public void completeScan()
- {
- // TODO Auto-generated method stub
- }
-
- public List<String> getIncludedTypes()
- {
- return null;
- }
-
- public void processArchivaArtifact( ArchivaArtifact artifact )
- throws ConsumerException
- {
- try
- {
- ManagedRepositoryContent repositoryContent =
- repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
-
- File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
-
- if( !file.exists() )
- {
- artifactDAO.deleteArtifact( artifact );
-
- // Remove all repository problems related to this artifact
- Constraint artifactConstraint = new RepositoryProblemByArtifactConstraint( artifact );
- List<RepositoryProblem> repositoryProblems =
- repositoryProblemDAO.queryRepositoryProblems( artifactConstraint );
-
- if ( repositoryProblems != null )
- {
- for ( RepositoryProblem repositoryProblem : repositoryProblems )
- {
- repositoryProblemDAO.deleteRepositoryProblem( repositoryProblem );
- }
- }
- }
- }
- catch ( RepositoryException re )
- {
- throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " +
- re.getMessage() );
- }
- catch ( ArchivaDatabaseException e )
- {
- throw new ConsumerException( e.getMessage() );
- }
- }
-
- public String getDescription()
- {
- return description;
- }
-
- public String getId()
- {
- return id;
- }
-
- public boolean isPermanent()
- {
- return false;
- }
-
- public void setArtifactDAO( ArtifactDAO artifactDAO)
- {
- this.artifactDAO = artifactDAO;
- }
-
- public void setRepositoryProblemDAO( RepositoryProblemDAO repositoryProblemDAO )
- {
- this.repositoryProblemDAO = repositoryProblemDAO;
- }
-
- public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
- {
- this.repositoryFactory = repositoryFactory;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.consumers.database;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
-import org.apache.maven.archiva.consumers.ConsumerException;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.model.ArchivaProjectModel;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.RepositoryContentFactory;
-import org.apache.maven.archiva.repository.RepositoryException;
-import org.apache.maven.archiva.database.ProjectModelDAO;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
-import org.codehaus.plexus.cache.Cache;
-
-import java.util.List;
-import java.io.File;
-
-/**
- * Consumer for removing from the database the project models of artifacts that have been
- * deleted from the repository.
- *
- * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- * @version $Id$
- *
- * @plexus.component role="org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer"
- * role-hint="not-present-remove-db-project"
- * instantiation-strategy="per-lookup"
- */
-public class DatabaseCleanupRemoveProjectConsumer
- extends AbstractMonitoredConsumer
- implements DatabaseCleanupConsumer
-{
- /**
- * @plexus.configuration default-value="not-present-remove-db-project"
- */
- private String id;
-
- /**
- * @plexus.configuration default-value="Remove project from database if not present on filesystem."
- */
- private String description;
-
- /**
- * @plexus.requirement role-hint="jdo"
- */
- private ProjectModelDAO projectModelDAO;
-
- /**
- * @plexus.requirement
- */
- private RepositoryContentFactory repositoryFactory;
-
- /**
- * @plexus.requirement role-hint="effective-project-cache"
- */
- private Cache effectiveProjectCache;
-
- public void beginScan()
- {
- // TODO Auto-generated method stub
- }
-
- public void completeScan()
- {
- // TODO Auto-generated method stub
- }
-
- public List<String> getIncludedTypes()
- {
- return null;
- }
-
- public void processArchivaArtifact( ArchivaArtifact artifact )
- throws ConsumerException
- {
- if ( !StringUtils.equals( "pom", artifact.getType() ) )
- {
- // Not a pom. Skip it.
- return;
- }
-
- try
- {
- ManagedRepositoryContent repositoryContent =
- repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
-
- File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
-
- if ( !file.exists() )
- {
- ArchivaProjectModel projectModel =
- projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
- artifact.getVersion() );
-
- projectModelDAO.deleteProjectModel( projectModel );
-
- // Force removal of project model from effective cache
- String projectKey = toProjectKey( projectModel );
- synchronized ( effectiveProjectCache )
- {
- if ( effectiveProjectCache.hasKey( projectKey ) )
- {
- effectiveProjectCache.remove( projectKey );
- }
- }
- }
- }
- catch ( RepositoryException re )
- {
- throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " + re.getMessage() );
- }
- catch ( ArchivaDatabaseException e )
- {
- throw new ConsumerException( e.getMessage() );
- }
-
- }
-
- public String getDescription()
- {
- return description;
- }
-
- public String getId()
- {
- return id;
- }
-
- public boolean isPermanent()
- {
- return false;
- }
-
- public void setProjectModelDAO( ProjectModelDAO projectModelDAO )
- {
- this.projectModelDAO = projectModelDAO;
- }
-
- public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
- {
- this.repositoryFactory = repositoryFactory;
- }
-
- public void setEffectiveProjectCache( Cache effectiveProjectCache )
- {
- this.effectiveProjectCache = effectiveProjectCache;
- }
-
- private String toProjectKey( ArchivaProjectModel project )
- {
- StringBuilder key = new StringBuilder();
-
- key.append( project.getGroupId() ).append( ":" );
- key.append( project.getArtifactId() ).append( ":" );
- key.append( project.getVersion() );
-
- return key.toString();
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.consumers.database;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.easymock.MockControl;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.database.RepositoryProblemDAO;
-
-/**
- * Test for DatabaseCleanupRemoveArtifactConsumer
- *
- */
-public class DatabaseCleanupRemoveArtifactConsumerTest
- extends AbstractDatabaseCleanupTest
-{
- private MockControl artifactDAOControl;
-
- private ArtifactDAO artifactDAOMock;
-
- private MockControl repositoryProblemDAOControl;
-
- private RepositoryProblemDAO repositoryProblemDAOMock;
-
- private DatabaseCleanupRemoveArtifactConsumer dbCleanupRemoveArtifactConsumer;
-
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- dbCleanupRemoveArtifactConsumer = new DatabaseCleanupRemoveArtifactConsumer();
-
- artifactDAOControl = MockControl.createControl( ArtifactDAO.class );
-
- artifactDAOMock = (ArtifactDAO) artifactDAOControl.getMock();
-
- repositoryProblemDAOControl = MockControl.createControl( RepositoryProblemDAO.class );
-
- repositoryProblemDAOMock = (RepositoryProblemDAO) repositoryProblemDAOControl.getMock();
-
- dbCleanupRemoveArtifactConsumer.setArtifactDAO( artifactDAOMock );
-
- dbCleanupRemoveArtifactConsumer.setRepositoryProblemDAO( repositoryProblemDAOMock );
-
- dbCleanupRemoveArtifactConsumer.setRepositoryFactory( repositoryFactory );
- }
-
- public void testIfArtifactWasNotDeleted()
- throws Exception
- {
- ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, "do-not-cleanup-artifact-test", TEST_VERSION, "jar" );
-
- artifactDAOControl.replay();
-
- repositoryProblemDAOControl.replay();
-
- dbCleanupRemoveArtifactConsumer.processArchivaArtifact( artifact );
-
- artifactDAOControl.verify();
-
- repositoryProblemDAOControl.verify();
- }
-
- public void testIfArtifactWasDeleted()
- throws Exception
- {
- ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, TEST_ARTIFACT_ID, TEST_VERSION, "jar" );
-
- artifactDAOMock.deleteArtifact( artifact );
-
- artifactDAOControl.replay();
-
- dbCleanupRemoveArtifactConsumer.processArchivaArtifact( artifact );
-
- artifactDAOControl.verify();
- }
-
-}
+++ /dev/null
-package org.apache.maven.archiva.consumers.database;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.codehaus.plexus.cache.Cache;
-import org.easymock.MockControl;
-import org.apache.maven.archiva.database.ProjectModelDAO;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.model.ArchivaProjectModel;
-
-/**
- * Test for DatabaseCleanupRemoveProjectConsumer
- *
- */
-public class DatabaseCleanupRemoveProjectConsumerTest
- extends AbstractDatabaseCleanupTest
-{
- private MockControl projectModelDAOControl;
-
- private ProjectModelDAO projectModelDAOMock;
-
- private DatabaseCleanupRemoveProjectConsumer dbCleanupRemoveProjectConsumer;
-
- private Cache effectiveProjectCache;
-
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- dbCleanupRemoveProjectConsumer = new DatabaseCleanupRemoveProjectConsumer();
-
- projectModelDAOControl = MockControl.createControl( ProjectModelDAO.class );
-
- projectModelDAOMock = (ProjectModelDAO) projectModelDAOControl.getMock();
-
- effectiveProjectCache = (Cache) lookup( Cache.class, "effective-project-cache" );
-
- dbCleanupRemoveProjectConsumer.setProjectModelDAO( projectModelDAOMock );
-
- dbCleanupRemoveProjectConsumer.setRepositoryFactory( repositoryFactory );
-
- dbCleanupRemoveProjectConsumer.setEffectiveProjectCache( effectiveProjectCache );
- }
-
- public void testIfArtifactWasNotDeleted()
- throws Exception
- {
- ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, "do-not-cleanup-artifact-test", TEST_VERSION, "pom" );
-
- projectModelDAOControl.replay();
-
- dbCleanupRemoveProjectConsumer.processArchivaArtifact( artifact );
-
- projectModelDAOControl.verify();
- }
-
- public void testIfArtifactWasDeleted()
- throws Exception
- {
- ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, TEST_ARTIFACT_ID, TEST_VERSION, "pom" );
-
- ArchivaProjectModel projectModel = createProjectModel( TEST_GROUP_ID, TEST_ARTIFACT_ID, TEST_VERSION );
-
- //this should return a value
- projectModelDAOControl.expectAndReturn(
- projectModelDAOMock.getProjectModel( TEST_GROUP_ID, TEST_ARTIFACT_ID, TEST_VERSION ),
- (ArchivaProjectModel) projectModel );
-
- projectModelDAOMock.deleteProjectModel( projectModel );
-
- projectModelDAOControl.replay();
-
- dbCleanupRemoveProjectConsumer.processArchivaArtifact( artifact );
-
- projectModelDAOControl.verify();
- }
-
- public void testIfArtifactWasNotAPom()
- throws Exception
- {
- ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, "do-not-cleanup-artifact-test", TEST_VERSION, "jar" );
-
- projectModelDAOControl.replay();
-
- dbCleanupRemoveProjectConsumer.processArchivaArtifact( artifact );
-
- projectModelDAOControl.verify();
- }
-
- public void tearDown()
- throws Exception
- {
- super.tearDown();
- }
-
-}
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<component-set>
- <components>
- <component>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>database-cleanup</role-hint>
- <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>database-cleanup</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
- <field-name>prePolicies</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
- <field-name>postPolicies</field-name>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>database-cleanup</role-hint>
- <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
- <configuration>
- <properties>
- <xml fileName="${basedir}/target/test/repository-manager.xml" config-optional="true" config-forceCreate="true"
- config-name="org.apache.maven.archiva.base" config-at="org.apache.maven.archiva"/>
- </properties>
- </configuration>
- </component>
-
- <component>
- <role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
- <implementation>org.apache.maven.archiva.repository.RepositoryContentFactory</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>database-cleanup</role-hint>
- </requirement>
- </requirements>
- </component>
- </components>
-</component-set>
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<component-set>
- <components>
- <component>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>database-cleanup</role-hint>
- <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>database-cleanup</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
- <field-name>prePolicies</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
- <field-name>postPolicies</field-name>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>database-cleanup</role-hint>
- <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
- <configuration>
- <properties>
- <xml fileName="${basedir}/target/test/repository-manager.xml" config-optional="true" config-forceCreate="true"
- config-name="org.apache.maven.archiva.base" config-at="org.apache.maven.archiva"/>
- </properties>
- </configuration>
- </component>
-
- <component>
- <role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
- <implementation>org.apache.maven.archiva.repository.RepositoryContentFactory</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>database-cleanup</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <component>
- <role>org.codehaus.plexus.cache.Cache</role>
- <role-hint>effective-project-cache</role-hint>
- <implementation>org.codehaus.plexus.cache.ehcache.EhcacheCache</implementation>
- <description>Effective Project Cache</description>
- <configuration>
- <disk-expiry-thread-interval-seconds>600</disk-expiry-thread-interval-seconds>
- <disk-persistent>true</disk-persistent>
- <disk-store-path>${java.io.tmpdir}/archiva/effectiveproject</disk-store-path>
- <eternal>true</eternal>
- <max-elements-in-memory>1000</max-elements-in-memory>
- <memory-eviction-policy>LRU</memory-eviction-policy>
- <name>effective-project-cache</name>
- <overflow-to-disk>false</overflow-to-disk>
- <!-- TODO: Adjust the time to live to be more sane (ie: huge 4+ hours) -->
- <!-- 45 minutes = 2700 seconds -->
- <time-to-idle-seconds>2700</time-to-idle-seconds>
- <!-- 30 minutes = 1800 seconds -->
- <time-to-live-seconds>1800</time-to-live-seconds>
- </configuration>
- </component>
- </components>
-</component-set>
+++ /dev/null
-package org.apache.maven.archiva.consumers.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
-import org.apache.maven.archiva.consumers.ConsumerException;
-import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
-import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
-import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.RepositoryContentFactory;
-import org.apache.maven.archiva.repository.RepositoryException;
-
-import java.io.File;
-import java.util.List;
-
-/**
- * LuceneCleanupRemoveIndexedConsumer
- *
- * @version $Id$
- * @plexus.component role="org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer"
- * role-hint="not-present-remove-indexed" instantiation-strategy="per-lookup"
- */
-public class LuceneCleanupRemoveIndexedConsumer
- extends AbstractMonitoredConsumer
- implements DatabaseCleanupConsumer
-{
- /**
- * @plexus.configuration default-value="not-present-remove-indexed"
- */
- private String id;
-
- /**
- * @plexus.configuration default-value="Remove indexed content if not present on filesystem."
- */
- private String description;
-
- /**
- * @plexus.requirement role-hint="lucene"
- */
- private RepositoryContentIndexFactory repoIndexFactory;
-
- /**
- * @plexus.requirement
- */
- private RepositoryContentFactory repoFactory;
-
- public void beginScan()
- {
- // TODO Auto-generated method stub
-
- }
-
- public void completeScan()
- {
- // TODO Auto-generated method stub
-
- }
-
- public List<String> getIncludedTypes()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public void processArchivaArtifact( ArchivaArtifact artifact )
- throws ConsumerException
- {
- try
- {
- ManagedRepositoryContent repoContent =
- repoFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
-
- File file = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );
-
- if( !file.exists() )
- {
- RepositoryContentIndex bytecodeIndex = repoIndexFactory.createBytecodeIndex( repoContent.getRepository() );
- RepositoryContentIndex hashcodesIndex = repoIndexFactory.createHashcodeIndex( repoContent.getRepository() );
- RepositoryContentIndex fileContentIndex =
- repoIndexFactory.createFileContentIndex( repoContent.getRepository() );
-
- FileContentRecord fileContentRecord = new FileContentRecord();
- fileContentRecord.setFilename( repoContent.toPath( artifact ) );
- fileContentIndex.deleteRecord( fileContentRecord );
-
- HashcodesRecord hashcodesRecord = new HashcodesRecord();
- hashcodesRecord.setArtifact( artifact );
- hashcodesIndex.deleteRecord( hashcodesRecord );
-
- BytecodeRecord bytecodeRecord = new BytecodeRecord();
- bytecodeRecord.setArtifact( artifact );
- bytecodeIndex.deleteRecord( bytecodeRecord );
- }
- }
- catch ( RepositoryException e )
- {
- throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
- }
- catch ( RepositoryIndexException e )
- {
- throw new ConsumerException( e.getMessage() );
- }
- }
-
- public String getDescription()
- {
- return description;
- }
-
- public String getId()
- {
- return id;
- }
-
- public boolean isPermanent()
- {
- return false;
- }
-
- public void setRepositoryIndexFactory( RepositoryContentIndexFactory repoIndexFactory )
- {
- this.repoIndexFactory = repoIndexFactory;
- }
-
- public void setRepositoryContentFactory( RepositoryContentFactory repoFactory )
- {
- this.repoFactory = repoFactory;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.consumers.lucene;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.model.ArchivaArtifactModel;
-import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-
-/**
- * LuceneCleanupRemoveIndexedConsumerTest
- *
- * @version
- */
-public class LuceneCleanupRemoveIndexedConsumerTest
- extends PlexusInSpringTestCase
-{
- private DatabaseCleanupConsumer luceneCleanupRemoveIndexConsumer;
-
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- luceneCleanupRemoveIndexConsumer = (DatabaseCleanupConsumer)
- lookup( DatabaseCleanupConsumer.class, "lucene-cleanup" );
- }
-
- public void testIfArtifactExists()
- throws Exception
- {
- ArchivaArtifact artifact = createArtifact(
- "org.apache.maven.archiva", "archiva-lucene-cleanup", "1.0", "jar" );
-
- luceneCleanupRemoveIndexConsumer.processArchivaArtifact( artifact );
- }
-
- public void testIfArtifactDoesNotExist()
- throws Exception
- {
- ArchivaArtifact artifact = createArtifact(
- "org.apache.maven.archiva", "deleted-artifact", "1.0", "jar" );
-
- luceneCleanupRemoveIndexConsumer.processArchivaArtifact( artifact );
- }
-
- private ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
- {
- ArchivaArtifactModel model = new ArchivaArtifactModel();
- model.setGroupId( groupId );
- model.setArtifactId( artifactId );
- model.setVersion( version );
- model.setType( type );
- model.setRepositoryId( "test-repo" );
-
- return new ArchivaArtifact( model );
- }
-
-}
+++ /dev/null
-package org.apache.maven.archiva.consumers.lucene.stubs;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
-
-/**
- * LuceneRepositoryContentIndexFactoryStub
- *
- * @version
- */
-public class LuceneRepositoryContentIndexFactoryStub
- implements RepositoryContentIndexFactory
-{
-
- public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub();
- }
-
- public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub();
- }
-
- public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub();
- }
-
-}
+++ /dev/null
-package org.apache.maven.archiva.consumers.lucene.stubs;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-import java.util.Collection;
-
-import junit.framework.Assert;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.Searchable;
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
-import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
-
-/**
- * @version
- */
-public class LuceneRepositoryContentIndexStub
- implements RepositoryContentIndex
-{
-
- public void deleteRecords( Collection records )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
- Assert.assertEquals( 2, records.size() );
- }
-
- public boolean exists()
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
- return false;
- }
-
- public Collection getAllRecordKeys()
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public Analyzer getAnalyzer()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public LuceneEntryConverter getEntryConverter()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public String getId()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public File getIndexDirectory()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public QueryParser getQueryParser()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public ManagedRepositoryConfiguration getRepository()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public Searchable getSearchable()
- throws RepositoryIndexSearchException
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public void indexRecords( Collection records )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void modifyRecord( LuceneRepositoryContentRecord record )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void modifyRecords( Collection records )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void deleteRecord( LuceneRepositoryContentRecord record )
- throws RepositoryIndexException
- {
- Assert.assertNotNull( record );
-
- // fail since the only record that should ever be deleted, according to the tests,
- // is deleted-artifact-1.0.jar
- if( record.getPrimaryKey().equals(
- "org/apache/maven/archiva/archiva-lucene-cleanup/1.0/archiva-lucene-cleanup-1.0.jar" ) ||
- record.getPrimaryKey().equals( "org.apache.maven.archiva:archiva-lucene-cleanup:1.0:jar" ) )
- {
- Assert.fail();
- }
- }
-
-}
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<component-set>
- <components>
-
- <component>
- <role>org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer</role>
- <role-hint>lucene-cleanup</role-hint>
- <implementation>org.apache.maven.archiva.consumers.lucene.LuceneCleanupRemoveIndexedConsumer</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene-cleanup</role-hint>
- <field-name>repoIndexFactory</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
- <implementation>org.apache.maven.archiva.repository.RepositoryContentFactory</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>lucene-cleanup</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
- <role-hint>lucene-cleanup</role-hint>
- <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>lucene-cleanup</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
- <field-name>prePolicies</field-name>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
- <field-name>postPolicies</field-name>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.codehaus.plexus.registry.Registry</role>
- <role-hint>lucene-cleanup</role-hint>
- <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
- <configuration>
- <properties>
- <xml fileName="${basedir}/src/test/conf/repository-manager.xml"
- config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
- </properties>
- </configuration>
- </component>
- <component>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <implementation>org.apache.maven.archiva.consumers.lucene.stubs.LuceneRepositoryContentIndexFactoryStub</implementation>
- <role-hint>lucene-cleanup</role-hint>
- </component>
-
- </components>
-</component-set>
* under the License.
*/
+import java.util.List;
+
+import org.apache.maven.archiva.database.constraints.RepositoryProblemByArtifactConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.model.ArchivaProjectModel;
+import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.codehaus.plexus.cache.Cache;
/**
* Process repository management events and respond appropriately.
*/
private ArtifactDAO artifactDAO;
+ /**
+ * @plexus.requirement role-hint="jdo"
+ */
+ private RepositoryProblemDAO repositoryProblemDAO;
+
+ /**
+ * @plexus.requirement role-hint="jdo"
+ */
+ private ProjectModelDAO projectModelDAO;
+
+ /**
+ * @plexus.requirement role-hint="effective-project-cache"
+ */
+ private Cache effectiveProjectCache;
+
public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
{
try
// ignored
}
- // TODO [MRM-37]: re-run the database consumers to clean up
+ try
+ {
+ // Remove all repository problems related to this artifact
+ Constraint artifactConstraint = new RepositoryProblemByArtifactConstraint( artifact );
+ List<RepositoryProblem> repositoryProblems =
+ repositoryProblemDAO.queryRepositoryProblems( artifactConstraint );
+
+ if ( repositoryProblems != null )
+ {
+ for ( RepositoryProblem repositoryProblem : repositoryProblems )
+ {
+ repositoryProblemDAO.deleteRepositoryProblem( repositoryProblem );
+ }
+ }
+ }
+ catch ( ArchivaDatabaseException e )
+ {
+ // ignored
+ }
+
+ if ( "pom".equals( artifact.getType() ) )
+ {
+ try
+ {
+ ArchivaProjectModel projectModel =
+ projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
+ artifact.getVersion() );
+
+ projectModelDAO.deleteProjectModel( projectModel );
+
+ // Force removal of project model from effective cache
+ String projectKey = toProjectKey( projectModel );
+ synchronized ( effectiveProjectCache )
+ {
+ if ( effectiveProjectCache.hasKey( projectKey ) )
+ {
+ effectiveProjectCache.remove( projectKey );
+ }
+ }
+ }
+ catch ( ArchivaDatabaseException e )
+ {
+ // ignored
+ }
+ }
+ }
+
+ private String toProjectKey( ArchivaProjectModel project )
+ {
+ StringBuilder key = new StringBuilder();
+
+ key.append( project.getGroupId() ).append( ":" );
+ key.append( project.getArtifactId() ).append( ":" );
+ key.append( project.getVersion() );
+
+ return key.toString();
}
}
+++ /dev/null
-package org.apache.maven.archiva.database.updater;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * DatabaseCleanupConsumer
- *
- * @version $Id$
- */
-public interface DatabaseCleanupConsumer
- extends ArchivaArtifactConsumer
-{
-
-}
*/
private List availableUnprocessedConsumers;
- /**
- * @plexus.requirement role="org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer"
- */
- private List availableCleanupConsumers;
-
- private Predicate selectedCleanupConsumers;
-
private Predicate selectedUnprocessedConsumers;
class SelectedUnprocessedConsumersPredicate
}
}
- class SelectedCleanupConsumersPredicate
- implements Predicate
- {
- public boolean evaluate( Object object )
- {
- boolean satisfies = false;
-
- if ( object instanceof DatabaseCleanupConsumer )
- {
- DatabaseCleanupConsumer consumer = (DatabaseCleanupConsumer) object;
- DatabaseScanningConfiguration config = archivaConfiguration.getConfiguration().getDatabaseScanning();
-
- return config.getCleanupConsumers().contains( consumer.getId() );
- }
-
- return satisfies;
- }
- }
-
public void initialize()
throws InitializationException
{
Predicate permanentConsumers = new PermanentConsumerPredicate();
- selectedCleanupConsumers = new OrPredicate( permanentConsumers, new SelectedCleanupConsumersPredicate() );
selectedUnprocessedConsumers = new OrPredicate( permanentConsumers, new SelectedUnprocessedConsumersPredicate() );
}
return ret;
}
- /**
- * Get the {@link List} of {@link DatabaseCleanupConsumer} objects for those
- * consumers selected due to the configuration.
- *
- * @return the list of selected {@link DatabaseCleanupConsumer} objects.
- */
- public List getSelectedCleanupConsumers()
- {
- List ret = new ArrayList();
- ret.addAll( CollectionUtils.select( availableCleanupConsumers, selectedCleanupConsumers ) );
- return ret;
- }
-
/**
* Get the complete {@link List} of {@link DatabaseUnprocessedArtifactConsumer} objects
* that are available in the system, regardless of configuration.
{
return Collections.unmodifiableList( this.availableUnprocessedConsumers );
}
-
- /**
- * Get the complete {@link List} of {@link DatabaseCleanupConsumer} objects
- * that are available in the system, regardless of configuration.
- *
- * @return the list of all available {@link DatabaseCleanupConsumer} objects.
- */
- public List getAvailableCleanupConsumers()
- {
- return Collections.unmodifiableList( this.availableCleanupConsumers );
- }
}
*/
public interface DatabaseUpdater
{
- /**
- * Execute the {@link #updateAllUnprocessed()} and {@link #updateAllProcessed()}
- * tasks in one go.
- *
- * @throws ArchivaDatabaseException
- */
- public void update()
- throws ArchivaDatabaseException;
-
/**
* Update all unprocessed content.
*
*/
public void updateUnprocessed( ArchivaArtifact artifact )
throws ArchivaDatabaseException;
-
- /**
- * Update all previously processed content.
- *
- * This is done to allow archiva to remove content from the database that
- * may have been removed from the filesystem too.
- *
- * @throws ArchivaDatabaseException if there was a fatal error with the database.
- */
- public void updateAllProcessed()
- throws ArchivaDatabaseException;
-
- /**
- * Update specific processed content.
- *
- * Example: This is done to allow a specific artifact to be removed from the
- * database if it no longer exists on the filesystem.
- *
- * @throws ArchivaDatabaseException if there was a fatal error with the database.
- */
- public void updateProcessed( ArchivaArtifact artifact )
- throws ArchivaDatabaseException;
}
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.collections.Predicate;
-import org.apache.commons.collections.functors.NotPredicate;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
private ProcessArchivaArtifactClosure processArtifactClosure = new ProcessArchivaArtifactClosure();
- public void update()
- throws ArchivaDatabaseException
- {
- updateAllUnprocessed();
- updateAllProcessed();
- }
-
public void updateAllUnprocessed()
throws ArchivaDatabaseException
{
}
}
- public void updateAllProcessed()
- throws ArchivaDatabaseException
- {
- List processedArtifacts = dao.getArtifactDAO().queryArtifacts( new ArtifactsProcessedConstraint( true ) );
-
- beginConsumerLifecycle( dbConsumers.getSelectedCleanupConsumers() );
-
- try
- {
- // Process each consumer.
- Predicate predicate = NotPredicate.getInstance( UnprocessedArtifactPredicate.getInstance() );
-
- Iterator it = IteratorUtils.filteredIterator( processedArtifacts.iterator(), predicate );
- while ( it.hasNext() )
- {
- ArchivaArtifact artifact = (ArchivaArtifact) it.next();
- updateProcessed( artifact );
- }
- }
- finally
- {
- endConsumerLifecycle( dbConsumers.getSelectedCleanupConsumers() );
- }
- }
-
private void endConsumerLifecycle( List consumers )
{
Iterator it = consumers.iterator();
artifact.getModel().setWhenProcessed( new Date() );
dao.getArtifactDAO().saveArtifact( artifact );
}
-
- public void updateProcessed( ArchivaArtifact artifact )
- throws ArchivaDatabaseException
- {
- List consumers = dbConsumers.getSelectedCleanupConsumers();
-
- if ( CollectionUtils.isEmpty( consumers ) )
- {
- log.warn( "There are no selected consumers for artifact cleanup." );
- return;
- }
-
- this.processArtifactClosure.setArtifact( artifact );
- CollectionUtils.forAllDo( consumers, this.processArtifactClosure );
- }
}
* under the License.
*/
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
-import org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer;
-import org.apache.maven.archiva.database.updater.TestDatabaseCleanupConsumer;
-import org.apache.maven.archiva.database.updater.TestDatabaseUnprocessedConsumer;
-import org.apache.maven.archiva.model.ArtifactReference;
-import org.apache.maven.archiva.model.VersionedReference;
-import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
-import org.codehaus.plexus.jdo.JdoFactory;
-import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-import org.jpox.SchemaTool;
-
import java.io.File;
import java.net.URL;
import java.text.SimpleDateFormat;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer;
+import org.apache.maven.archiva.database.updater.TestDatabaseUnprocessedConsumer;
+import org.apache.maven.archiva.model.ArtifactReference;
+import org.apache.maven.archiva.model.VersionedReference;
+import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
+import org.codehaus.plexus.jdo.JdoFactory;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import org.jpox.SchemaTool;
+
/**
* AbstractArchivaDatabaseTestCase
*
this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
}
- protected TestDatabaseCleanupConsumer lookupTestCleanupConsumer()
- throws Exception
- {
- TestDatabaseCleanupConsumer consumer = (TestDatabaseCleanupConsumer) lookup( DatabaseCleanupConsumer.class,
- "test-db-cleanup" );
- assertNotNull( "Test Database Cleanup Consumer should not be null.", consumer );
- return consumer;
- }
-
protected TestDatabaseUnprocessedConsumer lookupTestUnprocessedConsumer()
throws Exception
{
listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "database" );
}
+ @SuppressWarnings("unchecked")
public void testWiring()
{
List<RepositoryListener> listeners =
assertEquals( listener, listeners.get( 0 ) );
}
- public ArchivaArtifact createArtifact( String artifactId, String version, ArtifactDAO artifactDao )
+ public ArchivaArtifact createArtifact( String artifactId, String version, ArtifactDAO artifactDao, String type )
{
ArchivaArtifact artifact =
- artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
+ artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", type );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
{
ArtifactDAO artifactDao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
- // Setup artifacts in fresh DB.
- ArchivaArtifact artifact = createArtifact( "test-artifact", "1.0", artifactDao );
- artifactDao.saveArtifact( artifact );
+ ArchivaArtifact pomArtifact = createPom( artifactDao );
+ ArchivaArtifact jarArtifact = createJar( artifactDao );
- assertEquals( artifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null,
- "jar" ) );
+ assertEquals( pomArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
+ null, "pom" ) );
+ assertEquals( jarArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
+ null, "jar" ) );
- artifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
+ jarArtifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
ManagedRepositoryContent repository =
(ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
- listener.deleteArtifact( repository, artifact );
+ listener.deleteArtifact( repository, jarArtifact );
try
{
{
assertTrue( true );
}
+
+ assertEquals( pomArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
+ null, "pom" ) );
+ }
+
+ private ArchivaArtifact createJar( ArtifactDAO artifactDao )
+ throws ArchivaDatabaseException
+ {
+ ArchivaArtifact artifact = createArtifact( "test-artifact", "1.0", artifactDao, "jar" );
+ artifactDao.saveArtifact( artifact );
+ return artifact;
+ }
+
+ public void testDeletePomArtifact()
+ throws Exception
+ {
+ ArtifactDAO artifactDao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
+
+ ArchivaArtifact pomArtifact = createPom( artifactDao );
+ ArchivaArtifact jarArtifact = createJar( artifactDao );
+
+ assertEquals( pomArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
+ null, "pom" ) );
+ assertEquals( jarArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
+ null, "jar" ) );
+
+ pomArtifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "pom" );
+ ManagedRepositoryContent repository =
+ (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
+ listener.deleteArtifact( repository, pomArtifact );
+
+ try
+ {
+ artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "pom" );
+ fail( "Should not find artifact" );
+ }
+ catch ( ObjectNotFoundException e )
+ {
+ assertTrue( true );
+ }
+
+ assertEquals( jarArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
+ null, "jar" ) );
+ }
+
+ private ArchivaArtifact createPom( ArtifactDAO artifactDao )
+ throws ArchivaDatabaseException
+ {
+ ArchivaArtifact artifact = createArtifact( "test-artifact", "1.0", artifactDao, "pom" );
+ artifactDao.saveArtifact( artifact );
+ return artifact;
}
}
return dbconsumers;
}
- public void testGetAvailableCleanupConsumers()
- throws Exception
- {
- DatabaseConsumers dbconsumers = lookupDbConsumers();
- List available = dbconsumers.getAvailableCleanupConsumers();
- assertNotNull( "Available Cleanup Consumers should never be null.", available );
-
- assertTrue( "Available Cleanup Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
- }
-
public void testGetAvailableUnprocessedConsumers()
throws Exception
{
assertTrue( "Available Unprocessed Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
}
- public void testGetSelectedCleanupConsumers()
- throws Exception
- {
- DatabaseConsumers dbconsumers = lookupDbConsumers();
- List available = dbconsumers.getSelectedCleanupConsumers();
- assertNotNull( "Selected Cleanup Consumers should never be null.", available );
-
- assertTrue( "Selected Cleanup Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
- }
-
public void testGetSelectedUnprocessedConsumers()
throws Exception
{
+++ /dev/null
-package org.apache.maven.archiva.database.updater;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
-import org.apache.maven.archiva.consumers.ConsumerException;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-
-import java.util.List;
-
-/**
- * TestDatabaseCleanupConsumer
- *
- * @version $Id$
- */
-public class TestDatabaseCleanupConsumer
- extends AbstractMonitoredConsumer
- implements DatabaseCleanupConsumer
-{
- private int countBegin = 0;
- private int countComplete = 0;
- private int countProcessed = 0;
-
- public void resetCount()
- {
- countBegin = 0;
- countProcessed = 0;
- countComplete = 0;
- }
-
- public void beginScan()
- {
- countBegin++;
- }
-
- public void completeScan()
- {
- countComplete++;
- }
-
- public List getIncludedTypes()
- {
- return null;
- }
-
- public void processArchivaArtifact( ArchivaArtifact artifact )
- throws ConsumerException
- {
- countProcessed++;
- }
-
- public String getDescription()
- {
- return "Test Consumer for Database Cleanup";
- }
-
- public String getId()
- {
- return "test-db-cleanup";
- }
-
- public boolean isPermanent()
- {
- return false;
- }
-
- public int getCountBegin()
- {
- return countBegin;
- }
-
- public int getCountComplete()
- {
- return countComplete;
- }
-
- public int getCountProcessed()
- {
- return countProcessed;
- }
-}
</configuration>
</component>
- <component>
- <role>org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer</role>
- <role-hint>test-db-cleanup</role-hint>
- <implementation>org.apache.maven.archiva.database.updater.TestDatabaseCleanupConsumer</implementation>
- </component>
-
<component>
<role>org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer</role>
<role-hint>test-db-unprocessed</role-hint>
throw new TaskExecutionException( "Error running unprocessed updater", e );
}
- try
- {
- log.info( "Task: Updating processed artifacts" );
- databaseUpdater.updateAllProcessed();
- }
- catch ( ArchivaDatabaseException e )
- {
- throw new TaskExecutionException( "Error running processed updater", e );
- }
-
time = System.currentTimeMillis() - time;
log.info( "Finished database task in " + time + "ms." );
+++ /dev/null
-package org.apache.maven.archiva.scheduled;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
-import org.apache.maven.archiva.consumers.ConsumerException;
-import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-
-import java.util.List;
-
-/**
- * TestDatabaseCleanupConsumer
- *
- * @version $Id$
- */
-public class TestDatabaseCleanupConsumer
- extends AbstractMonitoredConsumer
- implements DatabaseCleanupConsumer
-{
- private int countBegin = 0;
- private int countComplete = 0;
- private int countProcessed = 0;
-
- public void resetCount()
- {
- countBegin = 0;
- countProcessed = 0;
- countComplete = 0;
- }
-
- public void beginScan()
- {
- countBegin++;
- }
-
- public void completeScan()
- {
- countComplete++;
- }
-
- public List getIncludedTypes()
- {
- return null;
- }
-
- public void processArchivaArtifact( ArchivaArtifact artifact )
- throws ConsumerException
- {
- countProcessed++;
- }
-
- public String getDescription()
- {
- return "Test Consumer for Database Cleanup";
- }
-
- public String getId()
- {
- return "test-db-cleanup";
- }
-
- public boolean isPermanent()
- {
- return false;
- }
-
- public int getCountBegin()
- {
- return countBegin;
- }
-
- public int getCountComplete()
- {
- return countComplete;
- }
-
- public int getCountProcessed()
- {
- return countProcessed;
- }
-}
</configuration>
</component>
- <component>
- <role>org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer</role>
- <role-hint>test-db-cleanup</role-hint>
- <implementation>org.apache.maven.archiva.scheduled.TestDatabaseCleanupConsumer</implementation>
- </component>
-
<component>
<role>org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer</role>
<role-hint>test-db-unprocessed</role-hint>
* under the License.
*/
-import com.opensymphony.xwork2.Preparable;
+import java.util.Collections;
+import java.util.List;
+
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.IndeterminateConfigurationException;
import org.apache.maven.archiva.database.updater.DatabaseConsumers;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
-import org.apache.maven.archiva.web.action.admin.scanning.AdminRepositoryConsumerComparator;
import org.apache.maven.archiva.web.action.PlexusActionSupport;
+import org.apache.maven.archiva.web.action.admin.scanning.AdminRepositoryConsumerComparator;
import org.codehaus.plexus.redback.rbac.Resource;
import org.codehaus.plexus.registry.RegistryException;
-
-import java.util.Collections;
-import java.util.List;
import org.codehaus.redback.integration.interceptor.SecureAction;
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
import org.codehaus.redback.integration.interceptor.SecureActionException;
+import com.opensymphony.xwork2.Preparable;
+
/**
* DatabaseAction
*
*/
private List enabledUnprocessedConsumers;
- /**
- * List of {@link AdminDatabaseConsumer} objects for "to cleanup" artifacts.
- */
- private List cleanupConsumers;
-
- /**
- * List of enabled {@link AdminDatabaseConsumer} objects for "to cleanup" artifacts.
- */
- private List enabledCleanupConsumers;
-
public void prepare()
throws Exception
{
CollectionUtils.forAllDo( databaseConsumers.getAvailableUnprocessedConsumers(), addAdminDbConsumer );
this.unprocessedConsumers = addAdminDbConsumer.getList();
Collections.sort( this.unprocessedConsumers, AdminRepositoryConsumerComparator.getInstance() );
-
- addAdminDbConsumer = new AddAdminDatabaseConsumerClosure( dbscanning.getCleanupConsumers() );
- CollectionUtils.forAllDo( databaseConsumers.getAvailableCleanupConsumers(), addAdminDbConsumer );
- this.cleanupConsumers = addAdminDbConsumer.getList();
- Collections.sort( this.cleanupConsumers, AdminRepositoryConsumerComparator.getInstance() );
}
public String updateUnprocessedConsumers()
return saveConfiguration();
}
- public String updateCleanupConsumers()
- {
- archivaConfiguration.getConfiguration().getDatabaseScanning().setCleanupConsumers( enabledCleanupConsumers );
-
- return saveConfiguration();
- }
-
public String updateSchedule()
{
archivaConfiguration.getConfiguration().getDatabaseScanning().setCronExpression( cron );
this.cron = cron;
}
- public List getCleanupConsumers()
- {
- return cleanupConsumers;
- }
-
public List getUnprocessedConsumers()
{
return unprocessedConsumers;
{
this.enabledUnprocessedConsumers = enabledUnprocessedConsumers;
}
-
- public List getEnabledCleanupConsumers()
- {
- return enabledCleanupConsumers;
- }
-
- public void setEnabledCleanupConsumers( List enabledCleanupConsumers )
- {
- this.enabledCleanupConsumers = enabledCleanupConsumers;
- }
}
</c:otherwise>
</c:choose>
-<h2>Database - Artifact Cleanup Scanning</h2>
-
-<c:choose>
- <c:when test="${empty (cleanupConsumers)}">
- <%-- No Consumers. Eeek! --%>
- <strong>There are no consumers for artifact cleanup.</strong>
- </c:when>
- <c:otherwise>
- <%-- Display the consumers. --%>
-
- <s:form method="post" action="database!updateCleanupConsumers"
- namespace="/admin" validate="false" theme="simple">
- <table class="consumers">
- <tr>
- <th> </th>
- <th>Enabled?</th>
- <th>ID</th>
- <th>Description</th>
- </tr>
- <c:forEach items="${cleanupConsumers}" var="consumer" varStatus="i">
- <c:choose>
- <c:when test='${(i.index)%2 eq 0}'>
- <c:set var="bgcolor" value="even" scope="page" />
- </c:when>
- <c:otherwise>
- <c:set var="bgcolor" value="odd" scope="page" />
- </c:otherwise>
- </c:choose>
-
- <tr>
- <td class="${bgcolor}">
- <input type="checkbox" name="enabledCleanupConsumers" theme="simple" value="${consumer.id}" <c:if test="${consumer.enabled}">checked</c:if> />
- </td>
- <td class="${bgcolor}">
- <c:if test="${consumer.enabled}">
- <strong>enabled</strong>
- </c:if>
- </td>
- <td class="${bgcolor}">
- <code>${consumer.id}</code>
- </td>
- <td class="${bgcolor}">${consumer.description}</td>
- </tr>
- </c:forEach>
- <tr>
- <td colspan="4">
- <s:submit value="Update Consumers" />
- </td>
- </tr>
- </table>
- </s:form>
-
- </c:otherwise>
-</c:choose>
-
-
</div>
</div>
</body>
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.constraints.ArtifactVersionsConstraint;
-import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.database.updater.DatabaseConsumers;
import org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
*/
public Boolean configureDatabaseConsumer( String consumerId, boolean enable ) throws Exception
{
- List<DatabaseCleanupConsumer> cleanupConsumers = dbConsumersUtil.getAvailableCleanupConsumers();
List<DatabaseUnprocessedArtifactConsumer> unprocessedConsumers =
dbConsumersUtil.getAvailableUnprocessedConsumers();
boolean found = false;
- boolean isCleanupConsumer = false;
- for( DatabaseCleanupConsumer consumer : cleanupConsumers )
+
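+ // check whether the requested id matches one of the available unprocessed-artifact consumers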
+ for( DatabaseUnprocessedArtifactConsumer consumer : unprocessedConsumers )
{
if( consumer.getId().equals( consumerId ) )
{
found = true;
- isCleanupConsumer = true;
break;
}
}
- if( !found )
- {
- for( DatabaseUnprocessedArtifactConsumer consumer : unprocessedConsumers )
- {
- if( consumer.getId().equals( consumerId ) )
- {
- found = true;
- break;
- }
- }
- }
-
if( !found )
{
throw new Exception( "Invalid database consumer." );
Configuration config = archivaConfiguration.getConfiguration();
DatabaseScanningConfiguration dbScanningConfig = config.getDatabaseScanning();
- if( isCleanupConsumer )
- {
- dbScanningConfig.addCleanupConsumer( consumerId );
- }
- else
- {
- dbScanningConfig.addUnprocessedConsumer( consumerId );
- }
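+ // record the consumer as enabled in the database scanning configuration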
+ dbScanningConfig.addUnprocessedConsumer( consumerId );
config.setDatabaseScanning( dbScanningConfig );
saveConfiguration( config );
{
List<String> consumers = new ArrayList<String>();
- List<DatabaseCleanupConsumer> cleanupConsumers = dbConsumersUtil.getAvailableCleanupConsumers();
List<DatabaseUnprocessedArtifactConsumer> unprocessedConsumers = dbConsumersUtil.getAvailableUnprocessedConsumers();
- for( DatabaseCleanupConsumer consumer : cleanupConsumers )
- {
- consumers.add( consumer.getId() );
- }
-
for( DatabaseUnprocessedArtifactConsumer consumer : unprocessedConsumers )
{
consumers.add( consumer.getId() );
import org.apache.maven.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.database.updater.DatabaseConsumers;
import org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
private RepositoryListener listener;
- private DatabaseCleanupConsumer cleanupIndexConsumer;
-
- private DatabaseCleanupConsumer cleanupDbConsumer;
-
- private MockControl cleanupConsumersControl;
-
protected void setUp()
throws Exception
{
dbConsumersUtilControl = MockClassControl.createControl( DatabaseConsumers.class );
dbConsumersUtil = ( DatabaseConsumers ) dbConsumersUtilControl.getMock();
- cleanupConsumersControl = MockControl.createControl( DatabaseCleanupConsumer.class );
- cleanupIndexConsumer = (DatabaseCleanupConsumer) cleanupConsumersControl.getMock();
- cleanupDbConsumer = (DatabaseCleanupConsumer) cleanupConsumersControl.getMock();
-
unprocessedConsumersControl = MockControl.createControl( DatabaseUnprocessedArtifactConsumer.class );
processArtifactConsumer = ( DatabaseUnprocessedArtifactConsumer ) unprocessedConsumersControl.getMock();
processPomConsumer = ( DatabaseUnprocessedArtifactConsumer ) unprocessedConsumersControl.getMock();
recordDbConsumers();
dbConsumersUtilControl.replay();
- cleanupConsumersControl.replay();
unprocessedConsumersControl.replay();
List<String> dbConsumers = service.getAllDatabaseConsumers();
dbConsumersUtilControl.verify();
- cleanupConsumersControl.verify();
unprocessedConsumersControl.verify();
assertNotNull( dbConsumers );
- assertEquals( 4, dbConsumers.size() );
- assertTrue( dbConsumers.contains( "cleanup-index" ) );
- assertTrue( dbConsumers.contains( "cleanup-database" ) );
+ assertEquals( 2, dbConsumers.size() );
assertTrue( dbConsumers.contains( "process-artifact" ) );
assertTrue( dbConsumers.contains( "process-pom" ) );
}
archivaConfigControl.setVoidCallable();
dbConsumersUtilControl.replay();
- cleanupConsumersControl.replay();
unprocessedConsumersControl.replay();
archivaConfigControl.replay();
configControl.replay();
}
dbConsumersUtilControl.verify();
- cleanupConsumersControl.verify();
unprocessedConsumersControl.verify();
archivaConfigControl.verify();
configControl.verify();
// test disable "process-pom" db consumer
dbConsumersUtilControl.reset();
- cleanupConsumersControl.reset();
unprocessedConsumersControl.reset();
archivaConfigControl.reset();
configControl.reset();
archivaConfigControl.setVoidCallable();
dbConsumersUtilControl.replay();
- cleanupConsumersControl.replay();
unprocessedConsumersControl.replay();
archivaConfigControl.replay();
configControl.replay();
}
dbConsumersUtilControl.verify();
- cleanupConsumersControl.verify();
unprocessedConsumersControl.verify();
archivaConfigControl.verify();
configControl.verify();
recordDbConsumers();
dbConsumersUtilControl.replay();
- cleanupConsumersControl.replay();
unprocessedConsumersControl.replay();
try
}
dbConsumersUtilControl.verify();
- cleanupConsumersControl.verify();
unprocessedConsumersControl.verify();
}
private void recordDbConsumers()
{
- List<DatabaseCleanupConsumer> cleanupConsumers = new ArrayList<DatabaseCleanupConsumer>();
- cleanupConsumers.add( cleanupIndexConsumer );
- cleanupConsumers.add( cleanupDbConsumer );
-
List<DatabaseUnprocessedArtifactConsumer> unprocessedConsumers =
new ArrayList<DatabaseUnprocessedArtifactConsumer>();
unprocessedConsumers.add( processArtifactConsumer );
unprocessedConsumers.add( processPomConsumer );
- dbConsumersUtilControl.expectAndReturn( dbConsumersUtil.getAvailableCleanupConsumers(), cleanupConsumers );
- cleanupConsumersControl.expectAndReturn( cleanupIndexConsumer.getId(), "cleanup-index" );
- cleanupConsumersControl.expectAndReturn( cleanupDbConsumer.getId(), "cleanup-database" );
-
dbConsumersUtilControl.expectAndReturn( dbConsumersUtil.getAvailableUnprocessedConsumers(), unprocessedConsumers );
unprocessedConsumersControl.expectAndReturn( processArtifactConsumer.getId(), "process-artifact" );
unprocessedConsumersControl.expectAndReturn( processPomConsumer.getId(), "process-pom" );