* New Artifacts In Repository
- The default url for this feed is: <<<http://[hostname]:[port]/archiva/rss/rss_feeds?repoId=[repositoryId]>>>
+ For versions 1.1 and 1.1.1, the default url for this feed is <<<http://[hostname]:[port]/archiva/rss/rss_feeds?repoId=[repositoryId]>>>.
+ But since version 1.1.2, this has been changed to <<<http://[hostname]:[port]/archiva/feeds/[repositoryId]>>>.
There are two ways to subscribe to this feed:
* New Versions of an Artifact
- The default url for this feed is: <<<http://[hostname]:[port]/archiva/rss/rss_feeds?groupId=[groupId]&artifactId=[artifactId]>>>
+ The default url for this feed for versions 1.1 and 1.1.1 is <<<http://[hostname]:[port]/archiva/rss/rss_feeds?groupId=[groupId]&artifactId=[artifactId]>>>
+ but was changed to <<<http://[hostname]:[port]/archiva/feeds/[groupId]/[artifactId]>>> since version 1.1.2 for consistency purposes.
You could subscribe to the feed the same way as specified in the New Artifacts In Repository section except that in #1, you
need to specify the <<<groupId>>> and <<<artifactId>>> instead of the <<<repositoryId>>>. And in #2, you need to go to the
note that only users with an Observer role for the specific repository would be able to subscribe to this feed.
If the 'guest' account is enabled for the repository, you would no longer be asked for the username and password when you
- subscribe to the feed.
\ No newline at end of file
+ subscribe to the feed.
+
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>appassembler-maven-plugin</artifactId>
- <version>1.0-SNAPSHOT</version>
+ <version>1.0-beta-2</version>
<configuration>
<daemons>
<daemon>
<name>Archiva Consumers :: Core Consumers</name>
<dependencies>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-database</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-indexer</artifactId>
- </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-configuration</artifactId>
* under the License.
*/
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
private static final String TYPE_CHECKSUM_CANNOT_CREATE = "checksum-create-failure";
private File repositoryDir;
-
- private List<String> propertyNameTriggers = new ArrayList<String>();
-
+
private List<String> includes = new ArrayList<String>();
public String getId()
}
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
- {
- if ( propertyNameTriggers.contains( propertyName ) )
+ {
+ if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
{
initIncludes();
}
public void initialize()
throws InitializationException
{
- propertyNameTriggers = new ArrayList<String>();
- propertyNameTriggers.add( "repositoryScanning" );
- propertyNameTriggers.add( "fileTypes" );
- propertyNameTriggers.add( "fileType" );
- propertyNameTriggers.add( "patterns" );
- propertyNameTriggers.add( "pattern" );
-
configuration.addChangeListener( this );
-
+
initIncludes();
}
}
* under the License.
*/
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
private FileTypes filetypes;
private File repositoryDir;
-
- private List<String> propertyNameTriggers = new ArrayList<String>();
-
+
private List<String> includes = new ArrayList<String>();
public String getId()
}
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
- {
- if ( propertyNameTriggers.contains( propertyName ) )
+ {
+ if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
{
initIncludes();
}
public void initialize()
throws InitializationException
- {
- propertyNameTriggers = new ArrayList<String>();
- propertyNameTriggers.add( "repositoryScanning" );
- propertyNameTriggers.add( "fileTypes" );
- propertyNameTriggers.add( "fileType" );
- propertyNameTriggers.add( "patterns" );
- propertyNameTriggers.add( "pattern" );
-
+ {
configuration.addChangeListener( this );
initIncludes();
* under the License.
*/
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
-import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
-import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
-import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.model.ArtifactReference;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.layout.LayoutException;
-
import java.io.File;
import java.io.FilenameFilter;
-import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import java.util.Set;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.model.ArtifactReference;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+
/**
* Base class for all repository purge tasks.
*
public abstract class AbstractRepositoryPurge
implements RepositoryPurge
{
- protected ManagedRepositoryContent repository;
-
- protected ArtifactDAO artifactDao;
-
- private Map<String, RepositoryContentIndex> indices;
+ protected final ManagedRepositoryContent repository;
+
+ protected final List<RepositoryListener> listeners;
- public AbstractRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- Map<String, RepositoryContentIndex> indices )
+ public AbstractRepositoryPurge( ManagedRepositoryContent repository, List<RepositoryListener> listeners )
{
this.repository = repository;
- this.artifactDao = artifactDao;
- this.indices = indices;
+ this.listeners = listeners;
}
/**
{
if( references != null && !references.isEmpty() )
{
- List<LuceneRepositoryContentRecord> fileContentRecords = new ArrayList<LuceneRepositoryContentRecord>();
- List<LuceneRepositoryContentRecord> hashcodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
- List<LuceneRepositoryContentRecord> bytecodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
-
for ( ArtifactReference reference : references )
{
File artifactFile = repository.toFile( reference );
new ArchivaArtifact( reference.getGroupId(), reference.getArtifactId(), reference.getVersion(),
reference.getClassifier(), reference.getType() );
- FileContentRecord fileContentRecord = new FileContentRecord();
- fileContentRecord.setFilename( repository.toPath( artifact ) );
- fileContentRecords.add( fileContentRecord );
-
- HashcodesRecord hashcodesRecord = new HashcodesRecord();
- hashcodesRecord.setArtifact( artifact );
- hashcodeRecords.add( hashcodesRecord );
-
- BytecodeRecord bytecodeRecord = new BytecodeRecord();
- bytecodeRecord.setArtifact( artifact );
- bytecodeRecords.add( bytecodeRecord );
-
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.deleteArtifact( repository, artifact );
+ }
+
// TODO: this needs to be logged
artifactFile.delete();
purgeSupportFiles( artifactFile );
-
- // intended to be swallowed
- // continue updating the database for all artifacts
- try
- {
- String artifactPath = toRelativePath( artifactFile );
- updateDatabase( artifactPath );
- }
- catch ( ArchivaDatabaseException ae )
- {
- // TODO: determine logging to be used
- }
- catch ( LayoutException le )
- {
- // Ignore
- }
- }
-
- try
- {
- updateIndices( fileContentRecords, hashcodeRecords, bytecodeRecords );
- }
- catch ( RepositoryIndexException e )
- {
- // Ignore
}
}
}
}
}
}
-
- private void updateDatabase( String path )
- throws ArchivaDatabaseException, LayoutException
- {
- ArtifactReference artifact = repository.toArtifactReference( path );
- ArchivaArtifact queriedArtifact =
- artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
- artifact.getClassifier(), artifact.getType() );
-
- artifactDao.deleteArtifact( queriedArtifact );
-
- // TODO [MRM-37]: re-run the database consumers to clean up
- }
-
- private void updateIndices( List<LuceneRepositoryContentRecord> fileContentRecords,
- List<LuceneRepositoryContentRecord> hashcodeRecords,
- List<LuceneRepositoryContentRecord> bytecodeRecords )
- throws RepositoryIndexException
- {
- RepositoryContentIndex index = indices.get( "filecontent" );
- index.deleteRecords( fileContentRecords );
-
- index = indices.get( "hashcodes" );
- index.deleteRecords( hashcodeRecords );
-
- index = indices.get( "bytecode" );
- index.deleteRecords( bytecodeRecords );
- }
}
* under the License.
*/
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
+import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.ProjectReference;
import org.apache.maven.archiva.model.VersionedReference;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.RepositoryNotFoundException;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
/**
* <p>
* This will look in a single managed repository, and purge any snapshots that are present
private RepositoryContentFactory repoContentFactory;
- public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- MetadataTools metadataTools, Map<String, RepositoryContentIndex> indices,
- ArchivaConfiguration archivaConfig, RepositoryContentFactory repoContentFactory )
+ public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, MetadataTools metadataTools,
+ ArchivaConfiguration archivaConfig,
+ RepositoryContentFactory repoContentFactory,
+ List<RepositoryListener> listeners )
{
- super( repository, artifactDao, indices );
+ super( repository, listeners );
this.metadataTools = metadataTools;
this.archivaConfig = archivaConfig;
this.repoContentFactory = repoContentFactory;
return;
}
- ArtifactReference artifact = repository.toArtifactReference( path );
+ ArtifactReference artifactRef = repository.toArtifactReference( path );
- if ( !VersionUtil.isSnapshot( artifact.getVersion() ) )
+ if ( !VersionUtil.isSnapshot( artifactRef.getVersion() ) )
{
// Nothing to do here, not a snapshot, skip it.
return;
}
ProjectReference reference = new ProjectReference();
- reference.setGroupId( artifact.getGroupId() );
- reference.setArtifactId( artifact.getArtifactId() );
+ reference.setGroupId( artifactRef.getGroupId() );
+ reference.setArtifactId( artifactRef.getArtifactId() );
// Gather up all of the versions.
List<String> allVersions = new ArrayList<String>( repository.getVersions( reference ) );
boolean needsMetadataUpdate = false;
VersionedReference versionRef = new VersionedReference();
- versionRef.setGroupId( artifact.getGroupId() );
- versionRef.setArtifactId( artifact.getArtifactId() );
+ versionRef.setGroupId( artifactRef.getGroupId() );
+ versionRef.setArtifactId( artifactRef.getArtifactId() );
+
+ ArchivaArtifact artifact =
+ new ArchivaArtifact( artifactRef.getGroupId(), artifactRef.getArtifactId(), artifactRef.getVersion(),
+ artifactRef.getClassifier(), artifactRef.getType() );
for ( String version : snapshotVersions )
{
{
versionRef.setVersion( version );
repository.deleteVersion( versionRef );
+
+ for ( RepositoryListener listener : listeners )
+ {
+ listener.deleteArtifact( repository, artifact );
+ }
+
needsMetadataUpdate = true;
}
}
if ( needsMetadataUpdate )
{
- updateMetadata( artifact );
+ updateMetadata( artifactRef );
}
}
catch ( LayoutException e )
* under the License.
*/
-import org.apache.commons.lang.time.DateUtils;
-import org.apache.maven.archiva.common.utils.VersionComparator;
-import org.apache.maven.archiva.common.utils.VersionUtil;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.model.ArtifactReference;
-import org.apache.maven.archiva.model.VersionedReference;
-import org.apache.maven.archiva.repository.ContentNotFoundException;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.layout.LayoutException;
-
import java.io.File;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
+import org.apache.commons.lang.time.DateUtils;
+import org.apache.maven.archiva.common.utils.VersionComparator;
+import org.apache.maven.archiva.common.utils.VersionUtil;
+import org.apache.maven.archiva.model.ArtifactReference;
+import org.apache.maven.archiva.model.VersionedReference;
+import org.apache.maven.archiva.repository.ContentNotFoundException;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.apache.maven.archiva.repository.layout.LayoutException;
+
/**
* Purge from repository all snapshots older than the specified days in the repository configuration.
*
private int retentionCount;
- public DaysOldRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao, int daysOlder,
- int retentionCount, Map<String, RepositoryContentIndex> indices )
+ public DaysOldRepositoryPurge( ManagedRepositoryContent repository, int daysOlder,
+ int retentionCount, List<RepositoryListener> listeners )
{
- super( repository, artifactDao, indices );
+ super( repository, listeners );
this.daysOlder = daysOlder;
this.retentionCount = retentionCount;
timestampParser = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
* under the License.
*/
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
-import org.apache.maven.archiva.database.ArchivaDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.RepositoryNotFoundException;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import edu.emory.mathcs.backport.java.util.Collections;
/**
* Consumer for removing old snapshots in the repository based on the criteria
*/
private ArchivaConfiguration configuration;
- /**
- * @plexus.requirement role-hint="jdo"
- */
- private ArchivaDAO dao;
-
/**
* @plexus.requirement
*/
private List<String> includes = new ArrayList<String>();
- private List<String> propertyNameTriggers = new ArrayList<String>();
-
private RepositoryPurge repoPurge;
private RepositoryPurge cleanUp;
private boolean deleteReleasedSnapshots;
-
- /**
- * @plexus.requirement role-hint="lucene"
- */
- private RepositoryContentIndexFactory indexFactory;
+ /** @plexus.requirement role="org.apache.maven.archiva.repository.events.RepositoryListener" */
+ private List<RepositoryListener> listeners = Collections.emptyList();
+
public String getId()
{
return this.id;
{
try
{
- Map<String, RepositoryContentIndex> indices = new HashMap<String, RepositoryContentIndex>();
- indices.put( "bytecode", indexFactory.createBytecodeIndex( repository ) );
- indices.put( "hashcodes", indexFactory.createHashcodeIndex( repository ) );
- indices.put( "filecontent", indexFactory.createFileContentIndex( repository ) );
-
ManagedRepositoryContent repositoryContent = repositoryFactory.getManagedRepositoryContent( repository
.getId() );
if ( repository.getDaysOlder() != 0 )
{
- repoPurge = new DaysOldRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
- .getDaysOlder(), repository.getRetentionCount(), indices );
+ repoPurge = new DaysOldRepositoryPurge( repositoryContent, repository.getDaysOlder(),
+ repository.getRetentionCount(), listeners );
}
else
{
- repoPurge = new RetentionCountRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
- .getRetentionCount(), indices );
+ repoPurge = new RetentionCountRepositoryPurge( repositoryContent, repository.getRetentionCount(),
+ listeners );
}
- cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, dao.getArtifactDAO(),
- metadataTools, indices, configuration, repositoryFactory );
+ cleanUp =
+ new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, metadataTools, configuration,
+ repositoryFactory, listeners );
deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
}
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
- if ( propertyNameTriggers.contains( propertyName ) )
+ if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
{
initIncludes();
}
public void initialize()
throws InitializationException
{
- propertyNameTriggers = new ArrayList<String>();
- propertyNameTriggers.add( "repositoryScanning" );
- propertyNameTriggers.add( "fileTypes" );
- propertyNameTriggers.add( "fileType" );
- propertyNameTriggers.add( "patterns" );
- propertyNameTriggers.add( "pattern" );
-
configuration.addChangeListener( this );
initIncludes();
// we need to check all files for deletion, especially if not modified
return true;
}
-
- public void setRepositoryContentIndexFactory( RepositoryContentIndexFactory indexFactory )
- {
- this.indexFactory = indexFactory;
- }
}
* under the License.
*/
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.VersionedReference;
import org.apache.maven.archiva.repository.ContentNotFoundException;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.repository.layout.LayoutException;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
/**
* Purge the repository by retention count. Retain only the specified number of snapshots.
*
{
private int retentionCount;
- public RetentionCountRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
- int retentionCount, Map<String, RepositoryContentIndex> indices )
+ public RetentionCountRepositoryPurge( ManagedRepositoryContent repository,
+ int retentionCount, List<RepositoryListener> listeners )
{
- super( repository, artifactDao, indices );
+ super( repository, listeners );
this.retentionCount = retentionCount;
}
* under the License.
*/
+import java.io.File;
+import java.io.IOException;
+
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
-import org.codehaus.plexus.jdo.JdoFactory;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-import org.jpox.SchemaTool;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Properties;
-import java.util.Map.Entry;
-
-import javax.jdo.PersistenceManager;
-import javax.jdo.PersistenceManagerFactory;
+import org.easymock.MockControl;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
private ManagedRepositoryContent repo;
- protected ArtifactDAO dao;
-
protected RepositoryPurge repoPurge;
+ protected MockControl listenerControl;
+
+ protected RepositoryListener listener;
+
+ @Override
protected void setUp()
throws Exception
{
super.setUp();
+
+ listenerControl = MockControl.createControl( RepositoryListener.class );
- DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
- assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
-
- jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
-
- jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
- jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:testdb" ) );
-
- jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
-
- jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
-
- jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
-
- jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
-
- jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
-
- jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
-
- jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );
-
- // jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );
-
- jdoFactory.setProperty( "org.jpox.validateTables", "true" );
-
- jdoFactory.setProperty( "org.jpox.validateColumns", "true" );
-
- jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );
-
- Properties properties = jdoFactory.getProperties();
-
- for ( Entry<Object, Object> entry : properties.entrySet() )
- {
- System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
- }
-
- URL jdoFileUrls[] = new URL[] { getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
-
- if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
- {
- fail( "Unable to process test " + getName() + " - missing package.jdo." );
- }
-
- File propsFile = null; // intentional
- boolean verbose = true;
-
- SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose );
- SchemaTool.createSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose, null );
-
- PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
-
- assertNotNull( pmf );
-
- PersistenceManager pm = pmf.getPersistenceManager();
-
- pm.close();
-
- dao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
+ listener = (RepositoryListener) listenerControl.getMock();
}
@Override
return repo;
}
- protected void populateDb( String groupId, String artifactId, List<String> versions )
- throws ArchivaDatabaseException
- {
- for ( String version : versions )
- {
- ArchivaArtifact artifact = dao.createArtifact( groupId, artifactId, version, "", "jar" );
- assertNotNull( artifact );
- artifact.getModel().setLastModified( new Date() );
- artifact.getModel().setOrigin( "test" );
- ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
- assertNotNull( savedArtifact );
-
- //POM
- artifact = dao.createArtifact( groupId, artifactId, version, "", "pom" );
- assertNotNull( artifact );
- artifact.getModel().setLastModified( new Date() );
- artifact.getModel().setOrigin( "test" );
- savedArtifact = dao.saveArtifact( artifact );
- assertNotNull( savedArtifact );
- }
- }
-
protected void assertDeleted( String path )
{
assertFalse( "File should have been deleted: " + path, new File( path ).exists() );
return testDir.getAbsolutePath();
}
-
- protected void populateDbForTestOrderOfDeletion()
- throws Exception
+
+ protected ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
{
- List<String> versions = new ArrayList<String>();
- versions.add( "1.1.2-20070427.065136-1" );
- versions.add( "1.1.2-20070506.163513-2" );
- versions.add( "1.1.2-20070615.105019-3" );
-
- populateDb( "org.apache.maven.plugins", "maven-assembly-plugin", versions );
+ return new ArchivaArtifact( groupId, artifactId, version, null, type );
}
}
* under the License.
*/
+import java.io.File;
+import java.util.Collections;
+
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.custommonkey.xmlunit.XMLAssert;
+import org.easymock.MockControl;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
extends AbstractRepositoryPurgeTest
{
private ArchivaConfiguration archivaConfiguration;
+
+ private MockControl listenerControl;
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO =
"org/apache/archiva/released-artifact-in-diff-repo/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.jar";
public static final String PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO = "org/apache/maven/plugins/maven-source-plugin/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar";
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO = "org/apache/maven/plugins/maven-plugin-plugin/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar";
+
+ private RepositoryListener listener;
protected void setUp()
throws Exception
{
- super.setUp();
-
- Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
+ super.setUp();
MetadataTools metadataTools = (MetadataTools) lookup( MetadataTools.class );
RepositoryContentFactory factory = (RepositoryContentFactory) lookup( RepositoryContentFactory.class, "cleanup-released-snapshots");
archivaConfiguration =
(ArchivaConfiguration) lookup( ArchivaConfiguration.class, "cleanup-released-snapshots" );
-
+
+ listenerControl = MockControl.createControl( RepositoryListener.class );
+
+ listener = (RepositoryListener) listenerControl.getMock();
repoPurge =
- new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), dao, metadataTools, map, archivaConfiguration, factory );
+ new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), metadataTools, archivaConfiguration, factory,
+ Collections.singletonList( listener ) );
}
public void testReleasedSnapshotsExistsInSameRepo()
throws Exception
{
-
Configuration config = archivaConfiguration.getConfiguration();
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
- populateReleasedSnapshotsTest();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-plugin-plugin",
+ "2.3-SNAPSHOT", "maven-plugin" ) );
+ listenerControl.replay();
+
repoPurge.process( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
+
+ listenerControl.verify();
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
config.addManagedRepository( getRepoConfiguration( RELEASES_TEST_REPO_ID, RELEASES_TEST_REPO_NAME ) );
- populateReleasedSnapshotsTestInDiffRepo();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.archiva",
+ "released-artifact-in-diff-repo", "1.0-SNAPSHOT",
+ "jar" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO );
+ listenerControl.verify();
+
String projectRoot = repoRoot + "/org/apache/archiva/released-artifact-in-diff-repo";
// check if the snapshot was removed
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
- populateHigherSnapshotExistsTest();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts - no deletions
+ listenerControl.replay();
+
repoPurge.process( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO );
+
+ listenerControl.verify();
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-source-plugin";
"//metadata/versioning/versions/version", metadataXml );
XMLAssert.assertXpathEvaluatesTo( "20070427033345", "//metadata/versioning/lastUpdated", metadataXml );
}
-
- private void populateReleasedSnapshotsTest()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.3-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-plugin-plugin", versions );
- }
-
- private void populateHigherSnapshotExistsTest()
- throws Exception
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.0.3-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-source-plugin", versions );
- }
-
- private void populateReleasedSnapshotsTestInDiffRepo()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "1.0-SNAPSHOT" );
-
- populateDb( "org.apache.archiva", "released-artifact-in-diff-repo", versions );
- }
-
}
* under the License.
*/
-import org.apache.commons.lang.time.DateUtils;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
-import java.util.HashMap;
+import java.util.Collections;
import java.util.List;
-import java.util.Map;
+
+import org.apache.commons.lang.time.DateUtils;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
public class DaysOldRepositoryPurgeTest
extends AbstractRepositoryPurgeTest
{
-
- private Map<String, RepositoryContentIndex> map;
-
private static final String[] extensions =
new String[] { "-5.jar", "-5.pom", "-6.jar", "-6.pom", "-7.jar", "-7.pom" };
private String sec;
- protected void setUp()
- throws Exception
- {
- super.setUp();
- }
-
private void setLastModified( String dirPath, long lastModified )
{
File dir = new File( dirPath );
public void testByLastModified()
throws Exception
{
- map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
repoPurge =
- new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+ new DaysOldRepositoryPurge( getRepository(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+ Collections.singletonList( listener ) );
String repoRoot = prepareTestRepos();
setLastModified( projectRoot + "/2.2-SNAPSHOT/", 1179382029 );
- populateDbForTestByLastModified();
-
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-install-plugin",
+ "2.2-SNAPSHOT", "maven-plugin" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-install-plugin",
+ "2.2-SNAPSHOT", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );
+
+ listenerControl.verify();
assertDeleted( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar" );
assertDeleted( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar.md5" );
public void testOrderOfDeletion()
throws Exception
{
- map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
repoPurge =
- new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+ new DaysOldRepositoryPurge( getRepository(), getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+ Collections.singletonList( listener ) );
String repoRoot = prepareTestRepos();
setLastModified( projectRoot + "/1.1.2-SNAPSHOT/", 1179382029 );
- populateDbForTestOrderOfDeletion();
-
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+ "1.1.2-20070427.065136-1", "maven-plugin" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+ "1.1.2-20070427.065136-1", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
+ listenerControl.verify();
+
assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.sha1" );
assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.md5" );
public void testMetadataDrivenSnapshots()
throws Exception
{
- map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
repoPurge =
- new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+ new DaysOldRepositoryPurge( getRepository(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+ Collections.singletonList( listener ) );
String repoRoot = prepareTestRepos();
versions.add( "1.4.3-" + year + mon + day + "." + hr + min + sec + "-7" );
versions.add( "1.4.3-SNAPSHOT" );
- populateDb( "org.codehaus.plexus", "plexus-utils", versions );
-
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.plexus", "plexus-utils",
+ "1.4.3-20070113.163208-4", "jar" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.plexus", "plexus-utils",
+ "1.4.3-20070113.163208-4", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_BY_DAYS_OLD_METADATA_DRIVEN_ARTIFACT );
+ listenerControl.verify();
+
// this should be deleted since the filename version (timestamp) is older than
// 100 days even if the last modified date was <100 days ago
assertDeleted( versionRoot + "/plexus-utils-1.4.3-20070113.163208-4.jar" );
super.tearDown();
repoPurge = null;
}
-
- private void populateDbForTestByLastModified()
- throws Exception
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.2-20061118.060401-2" );
- versions.add( "2.2-20070513.034619-5" );
- versions.add( "2.2-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-install-plugin", versions );
- }
}
* under the License.
*/
+import java.io.File;
+
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.common.utils.BaseFile;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexFactoryStub;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate;
import org.custommonkey.xmlunit.XMLAssert;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
*/
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
"repo-purge-consumer-by-retention-count" );
- LuceneRepositoryContentIndexFactoryStub indexFactory = new LuceneRepositoryContentIndexFactoryStub();
- indexFactory.setExpectedRecordsSize( 2 );
-
- ( (RepositoryPurgeConsumer) repoPurgeConsumer ).setRepositoryContentIndexFactory( indexFactory );
-
- populateDbForRetentionCountTest();
-
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDaysOlder( 0 ); // force days older off to allow retention count purge to execute.
repoConfiguration.setRetentionCount( TEST_RETENTION_COUNT );
public void testConsumerByDaysOld()
throws Exception
{
- populateDbForDaysOldTest();
-
KnownRepositoryContentConsumer repoPurgeConsumer =
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
"repo-purge-consumer-by-days-old" );
- LuceneRepositoryContentIndexFactoryStub indexFactory = new LuceneRepositoryContentIndexFactoryStub();
- indexFactory.setExpectedRecordsSize( 2 );
-
- ( (RepositoryPurgeConsumer) repoPurgeConsumer ).setRepositoryContentIndexFactory( indexFactory );
-
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDaysOlder( TEST_DAYS_OLDER );
addRepoToConfiguration( "days-old", repoConfiguration );
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
"repo-purge-consumer-by-retention-count" );
- populateDbForReleasedSnapshotsTest();
-
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDeleteReleasedSnapshots( false ); // Set to NOT delete released snapshots.
addRepoToConfiguration( "retention-count", repoConfiguration );
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
"repo-purge-consumer-by-days-old" );
- populateDbForReleasedSnapshotsTest();
-
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
repoConfiguration.setDeleteReleasedSnapshots( true );
addRepoToConfiguration( "days-old", repoConfiguration );
"//metadata/versioning/versions/version", metadataXml );
XMLAssert.assertXpathEvaluatesTo( "20070315032817", "//metadata/versioning/lastUpdated", metadataXml );
}
-
- public void populateDbForRetentionCountTest()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "1.0RC1-20070504.153317-1" );
- versions.add( "1.0RC1-20070504.160758-2" );
- versions.add( "1.0RC1-20070505.090015-3" );
- versions.add( "1.0RC1-20070506.090132-4" );
-
- populateDb( "org.jruby.plugins", "jruby-rake-plugin", versions );
- }
-
- private void populateDbForDaysOldTest()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.2-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-install-plugin", versions );
- }
-
- public void populateDbForReleasedSnapshotsTest()
- throws ArchivaDatabaseException
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "2.3-SNAPSHOT" );
-
- populateDb( "org.apache.maven.plugins", "maven-plugin-plugin", versions );
- }
}
package org.apache.maven.archiva.consumers.core.repository;
+import java.util.Collections;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* under the License.
*/
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-
/**
* Test RetentionsCountRepositoryPurgeTest
*
{
super.setUp();
- Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
- map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
- map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
- repoPurge = new RetentionCountRepositoryPurge( getRepository(), dao,
- getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+ repoPurge =
+ new RetentionCountRepositoryPurge(
+ getRepository(),
+ getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+ Collections.singletonList( listener ) );
}
/**
public void testIfAJarWasFound()
throws Exception
{
- populateIfJarWasFoundDb();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
+ "1.0RC1-20070504.153317-1", "jar" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
+ "1.0RC1-20070504.153317-1", "pom" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
+ "1.0RC1-20070504.160758-2", "jar" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
+ "1.0RC1-20070504.160758-2", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
+ listenerControl.verify();
+
String versionRoot = repoRoot + "/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT";
// assert if removed from repo
public void testIfAPomWasFound()
throws Exception
{
- populateIfPomWasFoundDb();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.castor", "castor-anttasks",
+ "1.1.2-20070427.065136-1", "jar" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.castor", "castor-anttasks",
+ "1.1.2-20070427.065136-1", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );
+
+ listenerControl.verify();
String versionRoot = repoRoot + "/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT";
public void testOrderOfDeletion()
throws Exception
{
- populateDbForTestOrderOfDeletion();
-
String repoRoot = prepareTestRepos();
+ // test listeners for the correct artifacts
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+ "1.1.2-20070427.065136-1", "maven-plugin" ) );
+ listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+ "1.1.2-20070427.065136-1", "pom" ) );
+ listenerControl.replay();
+
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
+ listenerControl.verify();
+
String versionRoot = repoRoot +
"/org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT";
assertExists( versionRoot + "/maven-assembly-plugin-1.1.2-20070615.105019-3.pom.sha1" );
assertExists( versionRoot + "/maven-assembly-plugin-1.1.2-20070615.105019-3.pom.md5" );
}
-
- public void populateIfJarWasFoundDb()
- throws Exception
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "1.0RC1-20070504.153317-1" );
- versions.add( "1.0RC1-20070504.160758-2" );
- versions.add( "1.0RC1-20070505.090015-3" );
- versions.add( "1.0RC1-20070506.090132-4" );
-
- populateDb( "org.jruby.plugins", "jruby-rake-plugin", versions );
- }
-
- public void populateIfPomWasFoundDb()
- throws Exception
- {
- List<String> versions = new ArrayList<String>();
- versions.add( "1.1.2-20070427.065136-1" );
- versions.add( "1.1.2-20070615.105019-3" );
- versions.add( "1.1.2-20070506.163513-2" );
-
- populateDb( "org.codehaus.castor", "castor-anttasks", versions );
- }
}
+++ /dev/null
-package org.apache.maven.archiva.consumers.core.repository.stubs;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
-
-/**
- * LuceneRepositoryContenIndexFactoryStub
- *
- * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- * @version
- */
-public class LuceneRepositoryContentIndexFactoryStub
- implements RepositoryContentIndexFactory
-{
-
- private int expectedRecordsSize = 0;
-
- public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
- }
-
- public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
- }
-
- public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
- {
- // TODO Auto-generated method stub
- return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
- }
-
- public void setExpectedRecordsSize( int size )
- {
- expectedRecordsSize = size;
- }
-}
+++ /dev/null
-package org.apache.maven.archiva.consumers.core.repository.stubs;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.File;
-import java.util.Collection;
-
-import junit.framework.Assert;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.search.Searchable;
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
-import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
-import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
-
-/**
- * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
- * @version
- */
-public class LuceneRepositoryContentIndexStub
- implements RepositoryContentIndex
-{
- private int expectedRecordsSize;
-
- public LuceneRepositoryContentIndexStub()
- {
-
- }
-
- public LuceneRepositoryContentIndexStub( int size )
- {
- expectedRecordsSize = size;
- }
-
- public void deleteRecords( Collection records )
- throws RepositoryIndexException
- {
- Assert.assertEquals( expectedRecordsSize, records.size() );
- }
-
- public boolean exists()
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
- return false;
- }
-
- public Collection getAllRecordKeys()
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public Analyzer getAnalyzer()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public LuceneEntryConverter getEntryConverter()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public String getId()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public File getIndexDirectory()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public QueryParser getQueryParser()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public ManagedRepositoryConfiguration getRepository()
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public Searchable getSearchable()
- throws RepositoryIndexSearchException
- {
- // TODO Auto-generated method stub
- return null;
- }
-
- public void indexRecords( Collection records )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void modifyRecord( LuceneRepositoryContentRecord record )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void modifyRecords( Collection records )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
- public void deleteRecord( LuceneRepositoryContentRecord record )
- throws RepositoryIndexException
- {
- // TODO Auto-generated method stub
-
- }
-
-}
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>retention-count</role-hint>
</requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
<requirement>
<role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
<role-hint>retention-count</role-hint>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>retention-count</role-hint>
</requirement>
- <requirement>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- <field-name>indexFactory</field-name>
- </requirement>
</requirements>
<configuration>
<id>repository-purge</id>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>days-old</role-hint>
</requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
<requirement>
<role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
<role-hint>days-old</role-hint>
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>days-old</role-hint>
</requirement>
- <requirement>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- <field-name>indexFactory</field-name>
- </requirement>
</requirements>
<configuration>
<id>repository-purge</id>
</requirement>
</requirements>
</component>
-
-
- <!-- DAOs -->
- <component>
- <role>org.apache.maven.archiva.database.ArchivaDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ArtifactDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.ProjectModelDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- <requirement>
- <role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
- <role-hint>jdo</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.archiva.database.ArtifactDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.archiva.database.ProjectModelDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
- <component>
- <role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
- <requirements>
- <requirement>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
-
- <!-- JdoAccess -->
- <component>
- <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
- <role-hint>archiva</role-hint>
- <implementation>org.apache.maven.archiva.database.jdo.JdoAccess</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.jdo.JdoFactory</role>
- <role-hint>archiva</role-hint>
- </requirement>
- </requirements>
- </component>
-
- <!-- JDO Factory -->
- <component>
- <role>org.codehaus.plexus.jdo.JdoFactory</role>
- <role-hint>archiva</role-hint>
- <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
- <configuration>
- <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
- <driverName>org.hsqldb.jdbcDriver</driverName>
- <userName>sa</userName>
- <password></password>
- <url>jdbc:hsqldb:mem:testdb</url>
- <otherProperties>
- <property>
- <name>javax.jdo.PersistenceManagerFactoryClass</name>
- <value>org.jpox.PersistenceManagerFactoryImpl</value>
- </property>
- </otherProperties>
- </configuration>
- </component>
-
- <!-- LuceneRepositoryIndexFactory -->
- <component>
- <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
- <role-hint>lucene</role-hint>
- <implementation>org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexFactoryStub</implementation>
- </component>
-
</components>
</component-set>
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
* @plexus.requirement role-hint="lucene"
*/
private RepositoryContentIndexFactory indexFactory;
-
- private List<String> propertyNameTriggers = new ArrayList<String>();
-
+
private List<String> includes = new ArrayList<String>();
private RepositoryContentIndex index;
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
- if ( propertyNameTriggers.contains( propertyName ) )
+ if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
{
initIncludes();
}
public void initialize()
throws InitializationException
{
- propertyNameTriggers = new ArrayList<String>();
- propertyNameTriggers.add( "repositoryScanning" );
- propertyNameTriggers.add( "fileTypes" );
- propertyNameTriggers.add( "fileType" );
- propertyNameTriggers.add( "patterns" );
- propertyNameTriggers.add( "pattern" );
-
configuration.addChangeListener( this );
initIncludes();
--- /dev/null
+package org.apache.maven.archiva.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
+import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
+import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+
+/**
+ * Process repository management events and respond appropriately.
+ *
+ * @todo creating index instances every time is inefficient, the plugin needs to have a repository context to operate in
+ * @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="indexer"
+ */
+public class RepositoryContentIndexEventListener
+ implements RepositoryListener
+{
+ /**
+ * @plexus.requirement role-hint="lucene"
+ */
+ private RepositoryContentIndexFactory indexFactory;
+
+    /**
+     * Remove all index records for the deleted artifact from the file-content,
+     * hashcodes and bytecode indices. Index cleanup is best-effort: an index
+     * failure must not abort the repository purge that triggered this event.
+     *
+     * @param repository the repository content the artifact was deleted from
+     * @param artifact the artifact that was deleted
+     */
+    public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
+    {
+        try
+        {
+            // The file-content record is keyed by repository id + relative path.
+            RepositoryContentIndex index = indexFactory.createFileContentIndex( repository.getRepository() );
+            FileContentRecord fileContentRecord = new FileContentRecord();
+            fileContentRecord.setRepositoryId( repository.getRepository().getId() );
+            fileContentRecord.setFilename( repository.toPath( artifact ) );
+            index.deleteRecord( fileContentRecord );
+
+            // NOTE(review): the original code re-set the repository id on
+            // fileContentRecord here and below (copy/paste error); the hashcodes
+            // and bytecode records are keyed off the artifact itself — confirm
+            // the artifact carries the repository identity.
+            index = indexFactory.createHashcodeIndex( repository.getRepository() );
+            HashcodesRecord hashcodesRecord = new HashcodesRecord();
+            hashcodesRecord.setArtifact( artifact );
+            index.deleteRecord( hashcodesRecord );
+
+            index = indexFactory.createBytecodeIndex( repository.getRepository() );
+            BytecodeRecord bytecodeRecord = new BytecodeRecord();
+            bytecodeRecord.setArtifact( artifact );
+            index.deleteRecord( bytecodeRecord );
+        }
+        catch ( RepositoryIndexException e )
+        {
+            // Ignore: best-effort cleanup — a failed index delete must not fail the purge.
+        }
+    }
+}
* @return the results.
*/
public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum, SearchResultLimits limits );
+
+    /**
+     * Search for a specific artifact matching the given field values. The search is performed on the bytecode
+     * index/indices.
+     *
+     * @param principal the principal (user) performing the search
+     * @param selectedRepos the repositories to be searched
+     * @param groupId groupId to be matched
+     * @param artifactId artifactId to be matched
+     * @param version version to be matched
+     * @param className Java class or package name to be matched
+     * @param limits the limits to apply to the search results
+     * @return the results matching all of the given criteria
+     */
+    public SearchResults executeFilteredSearch( String principal, List<String> selectedRepos, String groupId,
+                                                String artifactId, String version, String className,
+                                                SearchResultLimits limits );
}
import java.util.List;
import org.apache.lucene.document.Document;
+import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.MultiSearcher;
+import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.Searchable;
+import org.apache.lucene.search.TermQuery;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.indexer.ArtifactKeys;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers;
+import org.apache.maven.archiva.indexer.bytecode.BytecodeKeys;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys;
/**
* DefaultCrossRepositorySearch
- *
+ *
* @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
* @plexus.component role="org.apache.maven.archiva.indexer.search.CrossRepositorySearch" role-hint="default"
implements CrossRepositorySearch, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( DefaultCrossRepositorySearch.class );
-
+
/**
* @plexus.requirement role-hint="lucene"
*/
private RepositoryContentIndexFactory indexFactory;
-
+
/**
* @plexus.requirement
*/
private List<ManagedRepositoryConfiguration> localIndexedRepositories = new ArrayList<ManagedRepositoryConfiguration>();
- public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum, SearchResultLimits limits )
+ /**
+ * Searches the bytecode index/indices of the selected repositories for artifacts
+ * matching the supplied groupId / artifactId / version / class-name filters.
+ * Empty or null filter values are skipped.
+ */
+ public SearchResults executeFilteredSearch( String principal, List<String> selectedRepos, String groupId,
+ String artifactId, String version, String className,
+ SearchResultLimits limits )
+ {
+ List<RepositoryContentIndex> indexes = getBytecodeIndexes( principal, selectedRepos );
+ BooleanQuery booleanQuery = new BooleanQuery();
+
+ if ( groupId != null && groupId.length() > 0 )
+ {
+ parseAndAdd( booleanQuery, ArtifactKeys.GROUPID, groupId, "\\.|-" );
+ }
+
+ if ( artifactId != null && artifactId.length() > 0 )
+ {
+ parseAndAdd( booleanQuery, ArtifactKeys.ARTIFACTID, artifactId, "\\.|-" );
+ }
+
+ if ( version != null && version.length() > 0 )
+ {
+ parseAndAdd( booleanQuery, ArtifactKeys.VERSION, version, "\\.|-" );
+ }
+
+ if ( className != null && className.length() > 0 )
+ {
+ try
+ {
+ QueryParser parser =
+ new MultiFieldQueryParser( new String[] { BytecodeKeys.CLASSES, BytecodeKeys.FILES,
+ BytecodeKeys.METHODS }, new BytecodeHandlers().getAnalyzer() );
+ booleanQuery.add( parser.parse( className ), BooleanClause.Occur.MUST );
+ }
+ catch ( ParseException e )
+ {
+ // An unparseable class-name term is simply omitted from the query;
+ // log it so the narrowed results are explainable.
+ log.warn( "Unable to parse class name term '" + className + "': " + e.getMessage() );
+ }
+ }
+
+ LuceneQuery query = new LuceneQuery( booleanQuery );
+ SearchResults results = searchAll( query, limits, indexes, null );
+ // Fix: use addAll() so each repository id is added individually (add() inserted the
+ // whole list as a single element), consistent with searchForChecksum().
+ results.getRepositories().addAll( this.localIndexedRepositories );
+
+ return results;
+ }
+
+ public SearchResults searchForChecksum( String principal, List<String> selectedRepos, String checksum,
+ SearchResultLimits limits )
{
List<RepositoryContentIndex> indexes = getHashcodeIndexes( principal, selectedRepos );
try
{
QueryParser parser = new MultiFieldQueryParser( new String[]{HashcodesKeys.MD5, HashcodesKeys.SHA1},
- new HashcodesHandlers().getAnalyzer() );
+ new HashcodesHandlers().getAnalyzer() );
LuceneQuery query = new LuceneQuery( parser.parse( checksum ) );
SearchResults results = searchAll( query, limits, indexes, null );
results.getRepositories().addAll( this.localIndexedRepositories );
return new SearchResults();
}
-
public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term, SearchResultLimits limits )
{
return searchForTerm( principal, selectedRepos, term, limits, null );
}
-
+
public SearchResults searchForTerm( String principal, List<String> selectedRepos, String term,
SearchResultLimits limits, List<String> previousSearchTerms )
{
List<RepositoryContentIndex> indexes = getFileContentIndexes( principal, selectedRepos );
-
+
try
{
QueryParser parser = new FileContentHandlers().getQueryParser();
LuceneQuery query = null;
SearchResults results = null;
- if( previousSearchTerms == null || previousSearchTerms.isEmpty() )
+ if ( previousSearchTerms == null || previousSearchTerms.isEmpty() )
{
query = new LuceneQuery( parser.parse( term ) );
results = searchAll( query, limits, indexes, null );
{
// AND the previous search terms
BooleanQuery booleanQuery = new BooleanQuery();
- for( String previousSearchTerm : previousSearchTerms )
+ for ( String previousSearchTerm : previousSearchTerms )
{
booleanQuery.add( parser.parse( previousSearchTerm ), BooleanClause.Occur.MUST );
}
-
- query = new LuceneQuery( booleanQuery );
+
+ query = new LuceneQuery( booleanQuery );
Filter filter = new QueryWrapperFilter( parser.parse( term ) );
results = searchAll( query, limits, indexes, filter );
- }
+ }
results.getRepositories().addAll( this.localIndexedRepositories );
-
+
return results;
}
catch ( ParseException e )
}
// empty results.
- return new SearchResults();
+ return new SearchResults();
}
-
+
private SearchResults searchAll( LuceneQuery luceneQuery, SearchResultLimits limits, List<RepositoryContentIndex> indexes, Filter filter )
{
org.apache.lucene.search.Query specificQuery = luceneQuery.getLuceneQuery();
// Perform the search.
Hits hits = null;
- if( filter != null )
+ if ( filter != null )
{
hits = searcher.search( specificQuery, filter );
}
return ret;
}
-
+
private boolean indexExists( RepositoryContentIndex index )
{
try
}
}
+ /**
+ * Splits the value on the given delimiter regex and adds one required (MUST)
+ * TermQuery per token to the query. Null or empty values are ignored; the
+ * original else-branch built a Term from a null/empty value, which would
+ * throw a NullPointerException, and was unreachable from the guarded callers.
+ */
+ private void parseAndAdd( BooleanQuery query, String key, String value, String delimiter )
+ {
+ if ( value != null && value.length() > 0 )
+ {
+ String[] terms = value.split( delimiter );
+ for ( int i = 0; i < terms.length; i++ )
+ {
+ Term valueTerm = new Term( key, terms[i] );
+ Query valueQuery = new TermQuery( valueTerm );
+ query.add( valueQuery, BooleanClause.Occur.MUST );
+ }
+ }
+ }
+
public void initialize()
throws InitializationException
{
private void addBytecodeHit( BytecodeRecord bytecode )
{
String key = toKey( bytecode.getArtifact() );
-
+
SearchResultHit hit = (SearchResultHit) this.hits.get( key );
if ( hit == null )
key.append( StringUtils.defaultString( artifact.getModel().getRepositoryId() ) ).append( ":" );
key.append( StringUtils.defaultString( artifact.getGroupId() ) ).append( ":" );
- key.append( StringUtils.defaultString( artifact.getArtifactId() ) );
+ key.append( StringUtils.defaultString( artifact.getArtifactId() ) ).append( ":" );
+ key.append( StringUtils.defaultString( artifact.getVersion() ) );
return key.toString();
}
--- /dev/null
+package org.apache.maven.archiva.indexer;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.search.Hits;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Searcher;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
+import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
+import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
+import org.apache.maven.archiva.indexer.search.BytecodeIndexPopulator;
+import org.apache.maven.archiva.indexer.search.FileContentIndexPopulator;
+import org.apache.maven.archiva.indexer.search.HashcodesIndexPopulator;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import org.codehaus.plexus.spring.PlexusToSpringUtils;
+
+/**
+ * Tests that the "indexer" RepositoryListener removes records from the
+ * file-content, hashcodes and bytecode Lucene indexes when an artifact
+ * is deleted from a managed repository.
+ */
+public class RepositoryContentIndexEventListenerTest
+ extends PlexusInSpringTestCase
+{
+ private static final String TEST_DEFAULT_REPOSITORY_NAME = "Test Default Repository";
+
+ private static final String TEST_DEFAULT_REPO_ID = "test-repo";
+
+ // Listener under test, looked up by its "indexer" role-hint.
+ private RepositoryListener listener;
+
+ @Override
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "indexer" );
+ }
+
+ // Sanity check: exactly one RepositoryListener is wired into the context.
+ public void testWiring()
+ {
+ List<RepositoryListener> listeners =
+ PlexusToSpringUtils.lookupList( PlexusToSpringUtils.buildSpringId( RepositoryListener.class ),
+ getApplicationContext() );
+
+ assertEquals( 1, listeners.size() );
+ assertEquals( listener, listeners.get( 0 ) );
+ }
+
+ public ArchivaArtifact createArtifact( String artifactId, String version )
+ {
+ ArchivaArtifact artifact =
+ new ArchivaArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
+ artifact.getModel().setRepositoryId( "testable_repo" );
+ return artifact;
+ }
+
+ public void testDeleteArtifact()
+ throws Exception
+ {
+ RepositoryContentIndexFactory indexFactory =
+ (RepositoryContentIndexFactory) lookup( RepositoryContentIndexFactory.class.getName(), "lucene" );
+
+ File repoDir = new File( getBasedir(), "src/test/managed-repository" );
+
+ assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
+
+ ManagedRepositoryConfiguration repository =
+ createRepository( TEST_DEFAULT_REPO_ID, TEST_DEFAULT_REPOSITORY_NAME, repoDir );
+
+ File indexLocation = new File( "target/index-events-" + getName() + "/" );
+
+ MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );
+
+ ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
+ repoConfig.setId( TEST_DEFAULT_REPO_ID );
+ repoConfig.setName( TEST_DEFAULT_REPOSITORY_NAME );
+ repoConfig.setLocation( repoDir.getAbsolutePath() );
+ repoConfig.setIndexDir( indexLocation.getAbsolutePath() );
+ repoConfig.setScanned( true );
+
+ // Start from a clean slate so the index counts below are deterministic.
+ if ( indexLocation.exists() )
+ {
+ FileUtils.deleteDirectory( indexLocation );
+ }
+
+ config.getConfiguration().addManagedRepository( repoConfig );
+
+ // Create the (empty) indexes.
+ RepositoryContentIndex indexHashcode = indexFactory.createHashcodeIndex( repository );
+ RepositoryContentIndex indexBytecode = indexFactory.createBytecodeIndex( repository );
+ RepositoryContentIndex indexContents = indexFactory.createFileContentIndex( repository );
+
+ // Now populate them.
+ Map<String, HashcodesRecord> hashcodesMap = new HashcodesIndexPopulator().populate( new File( getBasedir() ) );
+ indexHashcode.indexRecords( hashcodesMap.values() );
+ assertEquals( "Hashcode Key Count", hashcodesMap.size(), indexHashcode.getAllRecordKeys().size() );
+ assertRecordCount( indexHashcode, hashcodesMap.size() );
+
+ Map<String, BytecodeRecord> bytecodeMap = new BytecodeIndexPopulator().populate( new File( getBasedir() ) );
+ indexBytecode.indexRecords( bytecodeMap.values() );
+ assertEquals( "Bytecode Key Count", bytecodeMap.size(), indexBytecode.getAllRecordKeys().size() );
+ assertRecordCount( indexBytecode, bytecodeMap.size() );
+
+ Map<String, FileContentRecord> contentMap = new FileContentIndexPopulator().populate( new File( getBasedir() ) );
+ indexContents.indexRecords( contentMap.values() );
+ assertEquals( "File Content Key Count", contentMap.size(), indexContents.getAllRecordKeys().size() );
+ assertRecordCount( indexContents, contentMap.size() );
+
+ ManagedRepositoryContent repositoryContent =
+ (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
+ repositoryContent.setRepository( repository );
+
+ ArchivaArtifact artifact =
+ new ArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "", "jar" );
+ listener.deleteArtifact( repositoryContent, artifact );
+
+ artifact =
+ new ArchivaArtifact( "org.apache.maven.archiva.record", "test-pom", "1.0", "", "pom" );
+ listener.deleteArtifact( repositoryContent, artifact );
+
+ // NOTE(review): expects each index to shrink by exactly one record after the two
+ // deletions above - confirm this matches the populator fixtures.
+ assertRecordCount( indexHashcode, hashcodesMap.size() - 1 );
+ assertRecordCount( indexBytecode, bytecodeMap.size() - 1 );
+ assertRecordCount( indexContents, contentMap.size() - 1 );
+ }
+
+ protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
+ {
+ ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
+ repo.setId( id );
+ repo.setName( name );
+ repo.setLocation( location.getAbsolutePath() );
+ return repo;
+ }
+
+ // Counts all documents currently in the index via a MatchAllDocsQuery.
+ private void assertRecordCount( RepositoryContentIndex index, int expectedCount )
+ throws Exception
+ {
+ Query query = new MatchAllDocsQuery();
+ Searcher searcher = (Searcher) index.getSearchable();
+ Hits hits = searcher.search( query );
+ assertEquals( "Expected Record Count for " + index.getId(), expectedCount, hits.length() );
+ }
+}
"org.apache.maven.continuum.web.action.BuildDefinitionAction.isBuildFresh", null, true );
}
+ /**
+ * Exercises CrossRepositorySearch.executeFilteredSearch with progressively
+ * narrower groupId / artifactId / version filters against the test repository.
+ */
+ public void testExecuteFilteredSearch()
+ throws Exception
+ {
+ CrossRepositorySearch search = lookupCrossRepositorySearch();
+
+ String expectedRepos[] = new String[] { TEST_DEFAULT_REPO_ID };
+
+ String expectedResults[] = new String[] { "org1", "org2", "org3", "org4", "org5", "org6", "org7", "org8" };
+
+ String secondExpectedResults[] = new String[] { "continuum-webapp" };
+
+ String thirdExpectedResults[] = new String[] { "archiva-common" };
+
+ // search for groupId
+ assertFilteredSearchResults( expectedRepos, expectedResults, search, "org", null, null, null, 30 );
+
+ // search for groupId and artifactId
+ assertFilteredSearchResults( expectedRepos, secondExpectedResults, search, "org.apache.maven",
+ "continuum-webapp", null, null, 30 );
+
+ // search for groupId , artifactId and version
+ assertFilteredSearchResults( expectedRepos, thirdExpectedResults, search, "org.apache.maven.archiva",
+ "archiva-common", "1.0", null, 30 );
+ }
+
+ /**
+ * Runs executeFilteredSearch as the guest user against the given repositories and
+ * asserts the repository and hit counts match the expectations.
+ */
+ private void assertFilteredSearchResults ( String expectedRepos[], String expectedResults[], CrossRepositorySearch search,
+ String groupId, String artifactId, String version, String className , int rowCount )
+ {
+ SearchResultLimits limits = new SearchResultLimits( 0 );
+ limits.setPageSize( rowCount );
+
+ List<String> selectedRepos = new ArrayList<String>();
+ selectedRepos.addAll( Arrays.asList( expectedRepos ) );
+
+ SearchResults results = null;
+
+ results = search.executeFilteredSearch( "guest" , selectedRepos, groupId, artifactId, version, className, limits );
+
+ assertNotNull( "Search Results should not be null.", results );
+ assertEquals( "Repository Hits", expectedRepos.length, results.getRepositories().size() );
+ // NOTE(review): the next two asserts hard-code a single expected repository;
+ // this helper is only valid for one-repo expectations as currently written.
+ assertEquals( expectedRepos.length, 1);
+ assertEquals( TEST_DEFAULT_REPO_ID , selectedRepos.get( 0 ) );
+ assertEquals( "Search Result Hits", expectedResults.length, results.getHits().size() );
+ }
+
private void assertSearchResults( String expectedRepos[], String expectedResults[], CrossRepositorySearch search,
String term, List<String> previousSearchTerms, boolean bytecode )
throws Exception
List<String> selectedRepos = new ArrayList<String>();
selectedRepos.addAll( Arrays.asList( expectedRepos ) );
-
+
SearchResults results = null;
+
if( previousSearchTerms == null )
- {
- if( bytecode )
+ {
+ if( bytecode )
{
results = search.searchForBytecode( "guest", selectedRepos, term, limits );
}
{
results = search.searchForTerm( "guest", selectedRepos, term, limits, previousSearchTerms );
}
+
assertNotNull( "Search Results should not be null.", results );
assertEquals( "Repository Hits", expectedRepos.length, results.getRepositories().size() );
+
// TODO: test the repository ids returned.
assertEquals( "Search Result Hits", expectedResults.length, results.getHits().size() );
// TODO: test the order of hits.
// TODO: test the value of the hits.
}
-
+
protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
--- /dev/null
+<component-set>
+ <components>
+ <!-- Mock Archiva configuration so tests do not read a real archiva.xml. -->
+ <component>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <role-hint>mock</role-hint>
+ <implementation>org.apache.maven.archiva.indexer.MockConfiguration</implementation>
+ </component>
+ <!-- Lucene index factory wired against the mock configuration above. -->
+ <component>
+ <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
+ <role-hint>lucene</role-hint>
+ <implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
+ <description>Factory for Lucene repository content index instances.</description>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <role-hint>mock</role-hint>
+ <field-name>configuration</field-name>
+ </requirement>
+ </requirements>
+ </component>
+ </components>
+</component-set>
The number of new files discovered.
</description>
</field>
+ <field>
+ <name>totalProjectCount</name>
+ <version>1.0.0+</version>
+ <identifier>false</identifier>
+ <required>true</required>
+ <type>long</type>
+ <description>
+ The total number of unique projects in the repository.
+ </description>
+ </field>
+ <field>
+ <name>totalGroupCount</name>
+ <version>1.0.0+</version>
+ <identifier>false</identifier>
+ <required>true</required>
+ <type>long</type>
+ <description>
+ The total number of unique groups in the repository.
+ </description>
+ </field>
+ <field>
+ <name>totalArtifactCount</name>
+ <version>1.0.0+</version>
+ <identifier>false</identifier>
+ <required>true</required>
+ <type>long</type>
+ <description>
+ The total number of artifacts in the repository.
+ </description>
+ </field>
+ <field>
+ <name>totalSize</name>
+ <version>1.0.0+</version>
+ <identifier>false</identifier>
+ <required>true</required>
+ <type>long</type>
+ <description>
+ The total size in bytes of the repository.
+ </description>
+ </field>
</fields>
<codeSegments>
<codeSegment>
--- /dev/null
+package org.apache.maven.archiva.repository.events;
+
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Listen to events on the repository. This class is a stopgap
+ * refactoring measure until an event bus is in place to handle
+ * generic events such as these.
+ */
+public interface RepositoryListener
+{
+ /**
+ * Event for the deletion of a given artifact.
+ *
+ * @param repository the content of the managed repository the artifact is removed from.
+ * @param artifact the artifact that was deleted.
+ */
+ void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact );
+}
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.derby</groupId>
- <artifactId>derby</artifactId>
- </dependency>
<!-- TEST DEPS -->
<dependency>
<groupId>org.codehaus.plexus.registry</groupId>
--- /dev/null
+package org.apache.maven.archiva.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+
+/**
+ * Process repository management events and respond appropriately.
+ *
+ * @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="database"
+ */
+public class RepositoryDatabaseEventListener
+ implements RepositoryListener
+{
+ /**
+ * @plexus.requirement role-hint="jdo"
+ */
+ private ArtifactDAO artifactDAO;
+
+ /**
+ * Removes the database record for the deleted artifact. The persistent
+ * instance is looked up first so the DAO deletes the JDO-managed object.
+ */
+ public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
+ {
+ try
+ {
+ ArchivaArtifact queriedArtifact =
+ artifactDAO.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
+ artifact.getClassifier(), artifact.getType() );
+ artifactDAO.deleteArtifact( queriedArtifact );
+ }
+ catch ( ArchivaDatabaseException e )
+ {
+ // ignored - NOTE(review): best-effort delete; consider logging so an
+ // undeletable record is not silently left behind.
+ }
+
+ // TODO [MRM-37]: re-run the database consumers to clean up
+ }
+}
protected Object[] params;
protected int[] range;
+
+ protected String sortDirection = Constraint.ASCENDING;
public String getFilter()
{
public String getSortDirection()
{
- return Constraint.ASCENDING;
+ return sortDirection;
}
public String[] getVariables()
params = new Object[] { repoId };
}
- public ArtifactsByRepositoryConstraint( String repoId, Date targetWhenGathered, String sortColumn )
+ public ArtifactsByRepositoryConstraint( String repoId, Date targetWhenGathered, String sortColumn, boolean isBefore )
{
+ String condition = isBefore ? "<=" : ">=";
+
declImports = new String[] { "import java.util.Date" };
- whereClause = "this.repositoryId == repoId && this.whenGathered >= targetWhenGathered";
+ whereClause = "this.repositoryId == repoId && this.whenGathered " + condition + " targetWhenGathered";
declParams = new String[] { "String repoId", "Date targetWhenGathered" };
params = new Object[] { repoId, targetWhenGathered };
this.sortColumn = sortColumn;
}
+
+ /**
+ * Constrain to artifacts of the given type in the given repository whose
+ * whenGathered timestamp is on or before the given date.
+ *
+ * @param repoId the repository id to match.
+ * @param type the artifact type to match.
+ * @param before inclusive upper bound on whenGathered.
+ * @param sortColumn the column to sort results by.
+ */
+ public ArtifactsByRepositoryConstraint( String repoId, String type, Date before, String sortColumn )
+ {
+ declImports = new String[] { "import java.util.Date" };
+ whereClause =
+ "this.repositoryId == repoId && this.type == type && this.whenGathered <= before";
+ declParams = new String[] { "String repoId", "String type", "Date before" };
+ params = new Object[] { repoId, type, before };
+ this.sortColumn = sortColumn;
+ }
public String getSortColumn()
{
package org.apache.maven.archiva.database.constraints;
+import java.util.Date;
+
+import org.apache.maven.archiva.database.Constraint;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
declParams = new String[] { "String repoId" };
params = new Object[] { repoId };
}
+
+ /**
+ * Constrain to statistics for the given repository gathered within the
+ * inclusive [startDate, endDate] range, sorted in descending order.
+ *
+ * @param repoId the repository id to match.
+ * @param startDate inclusive lower bound on whenGathered.
+ * @param endDate inclusive upper bound on whenGathered.
+ */
+ public RepositoryContentStatisticsByRepositoryConstraint( String repoId, Date startDate, Date endDate )
+ {
+ declImports = new String[] { "import java.util.Date" };
+ whereClause = "repositoryId == repoId && whenGathered >= startDate && whenGathered <= endDate";
+ declParams = new String[] { "String repoId", "Date startDate", "Date endDate" };
+ params = new Object[] { repoId, startDate, endDate };
+
+ sortDirection = Constraint.DESCENDING;
+ }
public String getSortColumn()
{
implements Constraint
{
private StringBuffer sql = new StringBuffer();
-
+
+ private Class resultClass;
+
/**
* Obtain a set of unique ArtifactIds for the specified groupId.
*
*/
public UniqueArtifactIdConstraint( List<String> selectedRepositoryIds, String groupId )
{
- appendSelect( sql );
+ appendSelect( sql, false );
sql.append( " WHERE " );
SqlBuilder.appendWhereSelectedRepositories( sql, "repositoryId", selectedRepositoryIds );
sql.append( " && " );
*/
public UniqueArtifactIdConstraint( String groupId )
{
- appendSelect( sql );
+ appendSelect( sql, false );
sql.append( " WHERE " );
appendWhereSelectedGroupId( sql );
appendGroupBy( sql );
super.params = new Object[] { groupId };
}
+
+ /**
+ * Obtain a set of unique artifactIds with respect to their groups from the specified repository.
+ *
+ * @param repoId the repository id to restrict the query to.
+ * @param isUnique when true, select DISTINCT groupId/artifactId pairs (rows come back as Object[]).
+ */
+ public UniqueArtifactIdConstraint( String repoId, boolean isUnique )
+ {
+ appendSelect( sql, isUnique );
+ // NOTE(review): repoId is concatenated directly into the JDOQL filter, unlike the
+ // other constructors which use declParams/params. Safe only while repoId is never
+ // attacker-controlled - consider passing it as a declared parameter for consistency.
+ sql.append( " WHERE repositoryId == \"" + repoId + "\"" );
+
+ resultClass = Object[].class;
+ }
@SuppressWarnings("unchecked")
public Class getResultClass()
{
+ if( resultClass != null )
+ {
+ return resultClass;
+ }
+
return String.class;
}
buf.append( " GROUP BY artifactId ORDER BY artifactId ASCENDING" );
}
- private void appendSelect( StringBuffer buf )
+ private void appendSelect( StringBuffer buf, boolean isUnique )
{
- buf.append( "SELECT artifactId FROM " ).append( ArchivaArtifactModel.class.getName() );
+ if( isUnique )
+ {
+ buf.append( "SELECT DISTINCT groupId, artifactId FROM " ).append( ArchivaArtifactModel.class.getName() );
+ }
+ else
+ {
+ buf.append( "SELECT artifactId FROM " ).append( ArchivaArtifactModel.class.getName() );
+ }
}
private void appendWhereSelectedGroupId( StringBuffer buf )
--- /dev/null
+package org.apache.maven.archiva.database;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.Date;
+import java.util.List;
+
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.codehaus.plexus.spring.PlexusToSpringUtils;
+
+/**
+ * Tests that the "database" RepositoryListener removes the artifact's
+ * database record when an artifact is deleted from a managed repository.
+ */
+public class RepositoryDatabaseEventListenerTest
+ extends AbstractArchivaDatabaseTestCase
+{
+ // Listener under test, looked up by its "database" role-hint.
+ private RepositoryListener listener;
+
+ @Override
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "database" );
+ }
+
+ // Sanity check: exactly one RepositoryListener is wired into the context.
+ public void testWiring()
+ {
+ List<RepositoryListener> listeners =
+ PlexusToSpringUtils.lookupList( PlexusToSpringUtils.buildSpringId( RepositoryListener.class ),
+ getApplicationContext() );
+
+ assertEquals( 1, listeners.size() );
+ assertEquals( listener, listeners.get( 0 ) );
+ }
+
+ public ArchivaArtifact createArtifact( String artifactId, String version, ArtifactDAO artifactDao )
+ {
+ ArchivaArtifact artifact =
+ artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
+ artifact.getModel().setLastModified( new Date() );
+ artifact.getModel().setRepositoryId( "testable_repo" );
+ return artifact;
+ }
+
+ public void testDeleteArtifact()
+ throws Exception
+ {
+ ArtifactDAO artifactDao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
+
+ // Setup artifacts in fresh DB.
+ ArchivaArtifact artifact = createArtifact( "test-artifact", "1.0", artifactDao );
+ artifactDao.saveArtifact( artifact );
+
+ assertEquals( artifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null,
+ "jar" ) );
+
+ // Delete via the listener using a fresh (non-persistent) ArchivaArtifact instance.
+ artifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
+ ManagedRepositoryContent repository =
+ (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
+ listener.deleteArtifact( repository, artifact );
+
+ // The record must now be gone; lookups should fail with ObjectNotFoundException.
+ try
+ {
+ artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
+ fail( "Should not find artifact" );
+ }
+ catch ( ObjectNotFoundException e )
+ {
+ assertTrue( true );
+ }
+ }
+}
public class ArtifactsByRepositoryConstraintTest
extends AbstractArchivaDatabaseTestCase
{
- private ArtifactDAO artifactDao;
+ private ArtifactDAO artifactDao;
public void setUp()
throws Exception
{
- super.setUp();
+ super.setUp();
ArchivaDAO dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
artifactDao = dao.getArtifactDAO();
}
- private ArchivaArtifact createArtifact( String groupId, String artifactId, String version )
+ private ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
{
- ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, null, "jar" );
+ ArchivaArtifact artifact = artifactDao.createArtifact( groupId, artifactId, version, null, type );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "test-repo" );
Date whenGathered = Calendar.getInstance().getTime();
whenGathered.setTime( 123456789 );
- ArchivaArtifact artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0" );
+ ArchivaArtifact artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0", "jar" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
- artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0.1" );
+ artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0.1", "jar" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
- artifact = createArtifact( "org.apache.archiva", "artifact-two", "1.0.2" );
+ artifact = createArtifact( "org.apache.archiva", "artifact-two", "1.0.2", "jar" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
- artifact = createArtifact( "org.apache.archiva", "artifact-one", "2.0" );
+ artifact = createArtifact( "org.apache.archiva", "artifact-one", "2.0", "jar" );
artifact.getModel().setRepositoryId( "different-repo" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
{
Date whenGathered = Calendar.getInstance().getTime();
- ArchivaArtifact artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0" );
+ ArchivaArtifact artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0", "jar" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
- artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0.1" );
+ artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0.1", "jar" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
- artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0.2" );
+ artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0.2", "jar" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
- artifact = createArtifact( "org.apache.archiva", "artifact-one", "2.0" );
+ artifact = createArtifact( "org.apache.archiva", "artifact-one", "2.0", "jar" );
artifact.getModel().setRepositoryId( "different-repo" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
Date olderWhenGathered = Calendar.getInstance().getTime();
olderWhenGathered.setTime( 123456789 );
- artifact = createArtifact( "org.apache.archiva", "artifact-two", "1.1-SNAPSHOT" );
+ artifact = createArtifact( "org.apache.archiva", "artifact-two", "1.1-SNAPSHOT", "jar" );
artifact.getModel().setWhenGathered( olderWhenGathered );
artifactDao.saveArtifact( artifact );
- artifact = createArtifact( "org.apache.archiva", "artifact-three", "2.0-beta-1" );
+ artifact = createArtifact( "org.apache.archiva", "artifact-three", "2.0-beta-1", "jar" );
artifact.getModel().setWhenGathered( whenGathered );
artifactDao.saveArtifact( artifact );
assertConstraint( "Artifacts By Repository and When Gathered", 4,
- new ArtifactsByRepositoryConstraint( "test-repo", whenGathered, "repositoryId" ) );
+ new ArtifactsByRepositoryConstraint( "test-repo", whenGathered, "repositoryId", false ) );
}
+
+ public void testQueryArtifactsInRepoByType()
+ throws Exception
+ {
+ Calendar cal = Calendar.getInstance();
+ cal.set( 2008, 9, 1 );
+ Date whenGathered = cal.getTime();
+
+ ArchivaArtifact artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0", "jar" );
+ artifact.getModel().setWhenGathered( whenGathered );
+ artifactDao.saveArtifact( artifact );
+
+ artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0.1", "jar" );
+ artifact.getModel().setWhenGathered( whenGathered );
+ artifactDao.saveArtifact( artifact );
+
+ artifact = createArtifact( "org.apache.archiva", "artifact-one", "1.0.2", "jar" );
+ artifact.getModel().setWhenGathered( whenGathered );
+ artifactDao.saveArtifact( artifact );
+
+ artifact = createArtifact( "org.apache.archiva", "artifact-two", "1.1-SNAPSHOT", "war" );
+ artifact.getModel().setWhenGathered( whenGathered );
+ artifactDao.saveArtifact( artifact );
+
+ artifact = createArtifact( "org.apache.archiva", "artifact-three", "2.0-beta-1", "war" );
+ artifact.getModel().setWhenGathered( whenGathered );
+ artifactDao.saveArtifact( artifact );
+ Calendar cal2 = Calendar.getInstance();
+ cal2.set( 2008, 12, 12 );
+ Date diffWhenGathered = cal2.getTime();
+
+ artifact = createArtifact( "org.apache.archiva", "artifact-one", "2.0", "jar" );
+ artifact.getModel().setWhenGathered( diffWhenGathered );
+ artifactDao.saveArtifact( artifact );
+
+ cal2 = Calendar.getInstance();
+ cal2.set( 2008, 10, 30 );
+ Date before = cal2.getTime();
+
+ assertConstraint( "Artifacts of Type 'jar' By Repository and When Gathered", 3,
+ new ArtifactsByRepositoryConstraint( "test-repo", "jar", before, "whenGathered" ) );
+ assertConstraint( "Artifacts of Type 'war' By Repository and When Gathered", 2,
+ new ArtifactsByRepositoryConstraint( "test-repo", "war", before, "whenGathered" ) );
+ }
+
private void assertConstraint( String msg, int count, ArtifactsByRepositoryConstraint constraint )
throws Exception
{
* @version
*/
public class RepositoryContentStatisticsByRepositoryConstraintTest
- extends AbstractArchivaDatabaseTestCase
+ extends AbstractArchivaDatabaseTestCase
{
private RepositoryContentStatistics createStats( String repoId, String timestamp, long duration, long totalfiles,
long newfiles )
assertEquals( "internal", ( (RepositoryContentStatistics) results.get( 2 ) ).getRepositoryId() );
assertEquals( "internal", ( (RepositoryContentStatistics) results.get( 3 ) ).getRepositoryId() );
}
+
+ public void testStatsWithDateRange()
+ throws Exception
+ {
+ Constraint constraint =
+ new RepositoryContentStatisticsByRepositoryConstraint( "internal", toDate( "2007/10/18 8:00:00" ),
+ toDate( "2007/10/20 8:00:00" ) );
+ List results = dao.getRepositoryContentStatisticsDAO().queryRepositoryContentStatistics( constraint );
+ assertNotNull( "Stats: results (not null)", results );
+ assertEquals( "Stats: results.size", 3, results.size() );
+
+ assertEquals( "internal", ( (RepositoryContentStatistics) results.get( 0 ) ).getRepositoryId() );
+ assertEquals( toDate( "2007/10/20 8:00:00" ),
+ ( (RepositoryContentStatistics) results.get( 0 ) ).getWhenGathered() );
+
+ assertEquals( "internal", ( (RepositoryContentStatistics) results.get( 1 ) ).getRepositoryId() );
+ assertEquals( toDate( "2007/10/19 8:00:00" ),
+ ( (RepositoryContentStatistics) results.get( 1 ) ).getWhenGathered() );
+
+ assertEquals( "internal", ( (RepositoryContentStatistics) results.get( 2 ) ).getRepositoryId() );
+ assertEquals( toDate( "2007/10/18 8:00:00" ),
+ ( (RepositoryContentStatistics) results.get( 2 ) ).getWhenGathered() );
+ }
}
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.SimpleConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
public void testConstraint()
throws Exception
{
- ArchivaArtifact artifact;
+ setUpArtifacts();
- // Setup artifacts in fresh DB.
+ assertConstraint( new String[] {}, new UniqueArtifactIdConstraint( "org.apache" ) );
+ assertConstraint( new String[] { "commons-lang" }, new UniqueArtifactIdConstraint( "commons-lang" ) );
+ assertConstraint( new String[] { "test-one" }, new UniqueArtifactIdConstraint( "org.apache.maven.test" ) );
+ assertConstraint( new String[] { "test-two", "test-bar" },
+ new UniqueArtifactIdConstraint( "org.apache.maven.shared" ) );
+ assertConstraint( new String[] { "modellong" }, new UniqueArtifactIdConstraint( "org.codehaus.modello" ) );
+ }
+
+ public void testConstraintDisregardGroupId()
+ throws Exception
+ {
+ setUpArtifacts();
+
+ assertConstraintWithMultipleResultTypes( new String[] { "commons-lang", "test-one", "test-two", "test-two", "test-bar", "modellong" },
+ new UniqueArtifactIdConstraint( "testable_repo", true ) );
+ }
+
+ private void setUpArtifacts()
+ throws ArchivaDatabaseException
+ {
+ ArchivaArtifact artifact;
+
+ // Setup artifacts in fresh DB.
artifact = createArtifact( "commons-lang", "commons-lang", "2.0" );
artifactDao.saveArtifact( artifact );
artifact = createArtifact( "org.codehaus.modello", "modellong", "3.0" );
artifactDao.saveArtifact( artifact );
-
- assertConstraint( new String[] {}, new UniqueArtifactIdConstraint( "org.apache" ) );
- assertConstraint( new String[] { "commons-lang" }, new UniqueArtifactIdConstraint( "commons-lang" ) );
- assertConstraint( new String[] { "test-one" }, new UniqueArtifactIdConstraint( "org.apache.maven.test" ) );
- assertConstraint( new String[] { "test-two", "test-bar" },
- new UniqueArtifactIdConstraint( "org.apache.maven.shared" ) );
- assertConstraint( new String[] { "modellong" }, new UniqueArtifactIdConstraint( "org.codehaus.modello" ) );
}
-
+
+ private void assertConstraintWithMultipleResultTypes( String[] artifactIds, SimpleConstraint constraint )
+ throws Exception
+ {
+ String prefix = "Unique Artifact IDs: ";
+
+ List<Object[]> results = dao.query( constraint );
+ assertNotNull( prefix + "Not Null", results );
+ assertEquals( prefix + "Results.size", artifactIds.length, results.size() );
+
+ List<String> expectedArtifactIds = Arrays.asList( artifactIds );
+
+ Iterator<Object[]> it = results.iterator();
+ while ( it.hasNext() )
+ {
+ Object[] actualArtifactIds = (Object[]) it.next();
+ String actualArtifactId = ( String ) actualArtifactIds[1];
+ assertTrue( prefix + "artifactId result should not be blank.", StringUtils.isNotBlank( actualArtifactId ) );
+ assertTrue( prefix + " artifactId result <" + actualArtifactId + "> exists in expected artifactIds.",
+ expectedArtifactIds.contains( actualArtifactId ) );
+ }
+ }
+
private void assertConstraint( String[] artifactIds, SimpleConstraint constraint )
{
String prefix = "Unique Artifact IDs: ";
- List results = dao.query( constraint );
+ List<String> results = dao.query( constraint );
assertNotNull( prefix + "Not Null", results );
assertEquals( prefix + "Results.size", artifactIds.length, results.size() );
- List expectedArtifactIds = Arrays.asList( artifactIds );
+ List<String> expectedArtifactIds = Arrays.asList( artifactIds );
- Iterator it = results.iterator();
+ Iterator<String> it = results.iterator();
while ( it.hasNext() )
{
String actualArtifactId = (String) it.next();
assertConstraint( new String[] { "org.codehaus.modello", "org.codehaus.mojo", "org.apache.archiva" },
new UniqueGroupIdConstraint( observableRepositories ) );
- }
-
+ }
+
private void assertConstraint( String[] expectedGroupIds, SimpleConstraint constraint )
throws Exception
{
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-spring</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
</build>
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * ArchivaReportException
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version $Id$
+ */
+public class ArchivaReportException
+ extends Exception
+{
+ public ArchivaReportException( String message, Throwable cause )
+ {
+ super( message, cause );
+ }
+
+ public ArchivaReportException( String message )
+ {
+ super( message );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+import java.util.Date;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * RepositoryStatistics
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ */
+public class RepositoryStatistics
+{
+ private String repositoryId;
+
+ private long fileCount = 0;
+
+ private long totalSize = 0;
+
+ private long projectCount = 0;
+
+ private long groupCount = 0;
+
+ private long artifactCount = 0;
+
+ private long pluginCount = 0;
+
+ private long archetypeCount = 0;
+
+ private long jarCount = 0;
+
+ private long warCount = 0;
+
+ private long earCount = 0;
+
+ private long dllCount = 0;
+
+ private long exeCount = 0;
+
+ private long pomCount = 0;
+
+ private long deploymentCount = 0;
+
+ private long downloadCount = 0;
+
+ private Date dateOfScan;
+
+ public String getRepositoryId()
+ {
+ return repositoryId;
+ }
+
+ public void setRepositoryId( String repositoryId )
+ {
+ this.repositoryId = repositoryId;
+ }
+
+ public long getFileCount()
+ {
+ return fileCount;
+ }
+
+ public void setFileCount( long fileCount )
+ {
+ this.fileCount = fileCount;
+ }
+
+ public long getTotalSize()
+ {
+ return totalSize;
+ }
+
+ public void setTotalSize( long totalSize )
+ {
+ this.totalSize = totalSize;
+ }
+
+ public long getProjectCount()
+ {
+ return projectCount;
+ }
+
+ public void setProjectCount( long projectCount )
+ {
+ this.projectCount = projectCount;
+ }
+
+ public long getGroupCount()
+ {
+ return groupCount;
+ }
+
+ public void setGroupCount( long groupCount )
+ {
+ this.groupCount = groupCount;
+ }
+
+ public long getArtifactCount()
+ {
+ return artifactCount;
+ }
+
+ public void setArtifactCount( long artifactCount )
+ {
+ this.artifactCount = artifactCount;
+ }
+
+ public long getPluginCount()
+ {
+ return pluginCount;
+ }
+
+ public void setPluginCount( long pluginCount )
+ {
+ this.pluginCount = pluginCount;
+ }
+
+ public long getArchetypeCount()
+ {
+ return archetypeCount;
+ }
+
+ public void setArchetypeCount( long archetypeCount )
+ {
+ this.archetypeCount = archetypeCount;
+ }
+
+ public long getJarCount()
+ {
+ return jarCount;
+ }
+
+ public void setJarCount( long jarCount )
+ {
+ this.jarCount = jarCount;
+ }
+
+ public long getWarCount()
+ {
+ return warCount;
+ }
+
+ public void setWarCount( long warCount )
+ {
+ this.warCount = warCount;
+ }
+
+ public long getEarCount()
+ {
+ return earCount;
+ }
+
+ public void setEarCount( long earCount )
+ {
+ this.earCount = earCount;
+ }
+
+ public long getDllCount()
+ {
+ return dllCount;
+ }
+
+ public void setDllCount( long dllCount )
+ {
+ this.dllCount = dllCount;
+ }
+
+ public long getExeCount()
+ {
+ return exeCount;
+ }
+
+ public void setExeCount( long exeCount )
+ {
+ this.exeCount = exeCount;
+ }
+
+ public long getPomCount()
+ {
+ return pomCount;
+ }
+
+ public void setPomCount( long pomCount )
+ {
+ this.pomCount = pomCount;
+ }
+
+ public long getDeploymentCount()
+ {
+ return deploymentCount;
+ }
+
+ public void setDeploymentCount( long deploymentCount )
+ {
+ this.deploymentCount = deploymentCount;
+ }
+
+ public long getDownloadCount()
+ {
+ return downloadCount;
+ }
+
+ public void setDownloadCount( long downloadCount )
+ {
+ this.downloadCount = downloadCount;
+ }
+
+ public Date getDateOfScan()
+ {
+ return dateOfScan;
+ }
+
+ public void setDateOfScan( Date dateOfScan )
+ {
+ this.dateOfScan = dateOfScan;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.Date;
+import java.util.List;
+
+import org.apache.maven.archiva.model.RepositoryContentStatistics;
+
+/**
+ * RepositoryStatisticsReportGenerator
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version $Id$
+ */
+public interface RepositoryStatisticsReportGenerator
+{
+ public static final String JAR_TYPE = "jar";
+
+ public static final String WAR_TYPE = "war";
+
+ public static final String POM_TYPE = "pom";
+
+ public static final String MAVEN_PLUGIN = "maven-plugin";
+
+ public static final String ARCHETYPE = "archetype";
+
+ public List<RepositoryStatistics> generateReport( List<RepositoryContentStatistics> repoContentStats, String repository, Date startDate, Date endDate, DataLimits limits )
+ throws ArchivaReportException;
+
+ public List<RepositoryStatistics> generateReport( List<RepositoryContentStatistics> repoContentStats, String repository, Date startDate, Date endDate )
+ throws ArchivaReportException;
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.database.constraints.ArtifactsByRepositoryConstraint;
+import org.apache.maven.archiva.model.RepositoryContentStatistics;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * SimpleRepositoryStatisticsReportGenerator
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.reporting.RepositoryStatisticsReportGenerator" role-hint="simple"
+ */
+public class SimpleRepositoryStatisticsReportGenerator
+ implements RepositoryStatisticsReportGenerator
+{
+ private Logger log = LoggerFactory.getLogger( SimpleRepositoryStatisticsReportGenerator.class );
+
+ /**
+ * @plexus.requirement role-hint="jdo"
+ */
+ private ArchivaDAO dao;
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.maven.archiva.reporting.RepositoryStatisticsReportGenerator#generateReport(java.util.List
+ * repoContentStats, java.lang.String repository, java.util.Date startDate, java.util.Date endDate,
+ * org.apache.maven.archiva.reporting.DataLimits limits )
+ */
+ public List<RepositoryStatistics> generateReport( List<RepositoryContentStatistics> repoContentStats,
+ String repository, Date startDate, Date endDate, DataLimits limits )
+ throws ArchivaReportException
+ {
+ if( limits.getCurrentPage() > limits.getCountOfPages() )
+ {
+ throw new ArchivaReportException( "The requested page exceeds the total number of pages." );
+ }
+
+ int start = ( limits.getPerPageCount() * limits.getCurrentPage() ) - limits.getPerPageCount();
+ int end = ( start + limits.getPerPageCount() ) - 1;
+
+ if( end > repoContentStats.size() )
+ {
+ end = repoContentStats.size() - 1;
+ }
+
+ return constructRepositoryStatistics( repoContentStats, repository, endDate, start, end );
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @see org.apache.maven.archiva.reporting.RepositoryStatisticsReportGenerator#generateReport(java.util.List
+ * repoContentStats, java.lang.String repository, java.util.Date startDate, java.util.Date endDate)
+ */
+ public List<RepositoryStatistics> generateReport( List<RepositoryContentStatistics> repoContentStats, String repository, Date startDate, Date endDate )
+ throws ArchivaReportException
+ {
+ return constructRepositoryStatistics( repoContentStats, repository, endDate, 0, repoContentStats.size() - 1 );
+ }
+
+ private List<RepositoryStatistics> constructRepositoryStatistics(
+ List<RepositoryContentStatistics> repoContentStats,
+ String repository, Date endDate,
+ int start, int end )
+ {
+ ArtifactDAO artifactDao = dao.getArtifactDAO();
+
+ List<RepositoryStatistics> repoStatisticsList = new ArrayList<RepositoryStatistics>();
+ for( int i = start; i <= end; i++ )
+ {
+ RepositoryContentStatistics repoContentStat = (RepositoryContentStatistics) repoContentStats.get( i );
+ RepositoryStatistics repoStatistics = new RepositoryStatistics();
+ repoStatistics.setRepositoryId( repository );
+
+ // get only the latest
+ repoStatistics.setArtifactCount( repoContentStat.getTotalArtifactCount() );
+ repoStatistics.setGroupCount( repoContentStat.getTotalGroupCount() );
+ repoStatistics.setProjectCount( repoContentStat.getTotalProjectCount() );
+ repoStatistics.setTotalSize( repoContentStat.getTotalSize() );
+ repoStatistics.setFileCount( repoContentStat.getTotalFileCount() );
+ repoStatistics.setDateOfScan( repoContentStat.getWhenGathered() );
+
+ try
+ {
+ //TODO use the repo content stats whenGathered date instead of endDate for single repo reports
+ List types = artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( repository, JAR_TYPE, endDate, "whenGathered" ) );
+ repoStatistics.setJarCount( types.size() );
+
+ types = artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( repository, WAR_TYPE, endDate, "whenGathered" ) );
+ repoStatistics.setWarCount( types.size() );
+
+ types = artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( repository, MAVEN_PLUGIN, endDate, "whenGathered" ) );
+ repoStatistics.setPluginCount( types.size() );
+
+ // TODO: must need to be able to track archetypes. possible way of identifying an
+ // archetype is by checking if archetype.xml exists in src/main/resources/META-INF/
+
+ }
+ catch( ArchivaDatabaseException e )
+ {
+ log.error( "Error occurred while querying artifacts from the database.", e.getMessage() );
+ }
+
+ repoStatisticsList.add( repoStatistics );
+ }
+
+ return repoStatisticsList;
+ }
+
+ public void setDao( ArchivaDAO dao )
+ {
+ this.dao = dao;
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.reporting;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.database.ArtifactDAO;
+import org.apache.maven.archiva.database.constraints.ArtifactsByRepositoryConstraint;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.model.RepositoryContentStatistics;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import org.easymock.MockControl;
+import org.easymock.internal.AlwaysMatcher;
+
+/**
+ * SimpleRepositoryStatisticsReportGeneratorTest
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version $Id$
+ */
+public class SimpleRepositoryStatisticsReportGeneratorTest
+ extends PlexusInSpringTestCase
+{
+ private MockControl daoControl;
+
+ private ArchivaDAO dao;
+
+ private MockControl artifactDaoControl;
+
+ private ArtifactDAO artifactDao;
+
+ private SimpleRepositoryStatisticsReportGenerator generator;
+
+ private static final String REPO = "test-repo";
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ daoControl = MockControl.createControl( ArchivaDAO.class );
+ dao = ( ArchivaDAO ) daoControl.getMock();
+
+ generator = new SimpleRepositoryStatisticsReportGenerator();
+ generator.setDao( dao );
+
+ artifactDaoControl = MockControl.createControl( ArtifactDAO.class );
+ artifactDaoControl.setDefaultMatcher( new AlwaysMatcher() );
+ artifactDao = ( ArtifactDAO ) artifactDaoControl.getMock();
+ }
+
+ private Date toDate( int year, int month, int date, int hour, int min, int sec )
+ {
+ Calendar cal = Calendar.getInstance();
+ cal.clear();
+ cal.set( year, month, date, hour, min, sec );
+
+ return cal.getTime();
+ }
+
+ private List<ArchivaArtifact> createArtifacts( String type )
+ {
+ List<ArchivaArtifact> artifacts = new ArrayList<ArchivaArtifact>();
+ artifacts.add( createArtifact( REPO, "org.apache.archiva", "repository-statistics-" + type, "1.0", type ) );
+ artifacts.add( createArtifact( REPO, "org.apache.archiva", "repository-statistics-" + type, "1.1", type ) );
+ artifacts.add( createArtifact( REPO, "org.apache.archiva", "repository-statistics-" + type, "1.2", type ) );
+ artifacts.add( createArtifact( REPO, "org.apache.archiva", "repository-statistics-" + type, "2.0", type ) );
+ artifacts.add( createArtifact( REPO, "org.apache.archiva", "repository-statistics-" + type, "3.0", type ) );
+
+ return artifacts;
+ }
+
+ private ArchivaArtifact createArtifact( String repoId, String groupId, String artifactId, String version, String type )
+ {
+ ArchivaArtifact artifact = new ArchivaArtifact( groupId, artifactId, version, null, type );
+ artifact.getModel().setLastModified( new Date() );
+ artifact.getModel().setRepositoryId( repoId );
+
+ return artifact;
+ }
+
+ private RepositoryContentStatistics createRepositoryContentStatistics( Date startDate, String repositoryId )
+ {
+ RepositoryContentStatistics repoContentStats = new RepositoryContentStatistics();
+ repoContentStats.setRepositoryId( repositoryId );
+ repoContentStats.setDuration( 10000 );
+ repoContentStats.setNewFileCount( 100 );
+ repoContentStats.setTotalArtifactCount( 200 );
+ repoContentStats.setTotalFileCount( 250 );
+ repoContentStats.setTotalGroupCount( 100 );
+ repoContentStats.setTotalProjectCount( 180 );
+ repoContentStats.setTotalSize( 200000 );
+ repoContentStats.setWhenGathered( startDate );
+
+ return repoContentStats;
+ }
+
+ private List<RepositoryContentStatistics> createStatisticsHistoryForSingleRepositoryTest( String repoId )
+ {
+ List<RepositoryContentStatistics> repoContentStatsList = new ArrayList<RepositoryContentStatistics>();
+
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 11, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 10, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 10, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 9, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 9, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 8, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 8, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 7, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 7, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 6, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 6, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 5, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 5, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 4, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 4, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 3, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 3, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 2, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 2, 1, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 1, 16, 0, 0, 0 ), repoId ) );
+ repoContentStatsList.add( createRepositoryContentStatistics( toDate( 2008, 1, 1, 0, 0, 0 ), repoId ) );
+
+ return repoContentStatsList;
+ }
+
+ public void testSimpleReportWithPagination()
+ throws Exception
+ {
+ Date startDate = toDate( 2008, 1, 1, 0, 0, 0 );
+ Date endDate = toDate( 2008, 11, 30, 0, 0, 0 );
+
+ DataLimits limits = new DataLimits();
+ limits.setPerPageCount( 5 );
+ limits.setCurrentPage( 1 );
+ limits.setCountOfPages( 5 );
+ limits.setTotalCount( 21 );
+
+ List<ArchivaArtifact> jarArtifacts = createArtifacts( RepositoryStatisticsReportGenerator.JAR_TYPE );
+ List<ArchivaArtifact> warArtifacts = createArtifacts( RepositoryStatisticsReportGenerator.WAR_TYPE );
+ List<ArchivaArtifact> mavenPlugins = createArtifacts( RepositoryStatisticsReportGenerator.MAVEN_PLUGIN );
+
+ List<RepositoryContentStatistics> repoContentStats = createStatisticsHistoryForSingleRepositoryTest( REPO );
+
+ // get first page
+ daoControl.expectAndReturn( dao.getArtifactDAO(), artifactDao );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.JAR_TYPE, endDate, "whenGathered") ), jarArtifacts, 5 );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.WAR_TYPE, endDate, "whenGathered") ), warArtifacts, 5 );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.MAVEN_PLUGIN, endDate, "whenGathered") ), mavenPlugins, 5 );
+
+ daoControl.replay();
+ artifactDaoControl.replay();
+
+ List<RepositoryStatistics> data = generator.generateReport( repoContentStats, REPO, startDate, endDate, limits );
+
+ daoControl.verify();
+ artifactDaoControl.verify();
+
+ assertEquals( 5, data.size() );
+
+ RepositoryStatistics stats = (RepositoryStatistics) data.get( 0 );
+ assertEquals( REPO, stats.getRepositoryId() );
+ assertEquals( 200, stats.getArtifactCount() );
+ assertEquals( 5, stats.getJarCount() );
+ assertEquals( 5, stats.getWarCount() );
+ assertEquals( 5, stats.getPluginCount() );
+ assertEquals( toDate( 2008, 11, 1, 0, 0, 0 ).getTime(), stats.getDateOfScan().getTime() );
+ assertEquals( toDate( 2008, 9, 1, 0, 0, 0 ).getTime(), ( (RepositoryStatistics) data.get( 4 ) ).getDateOfScan().getTime() );
+
+ // get last page
+ limits.setCurrentPage( 5 );
+
+ daoControl.reset();
+ artifactDaoControl.reset();
+
+ artifactDaoControl.setDefaultMatcher( new AlwaysMatcher() );
+
+ daoControl.expectAndReturn( dao.getArtifactDAO(), artifactDao );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.JAR_TYPE, endDate, "whenGathered") ), jarArtifacts );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.WAR_TYPE, endDate, "whenGathered") ), warArtifacts );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.MAVEN_PLUGIN, endDate, "whenGathered") ), mavenPlugins );
+
+ daoControl.replay();
+ artifactDaoControl.replay();
+
+ data = generator.generateReport( repoContentStats, REPO, startDate, endDate, limits );
+
+ daoControl.verify();
+ artifactDaoControl.verify();
+
+ assertEquals( 1, data.size() );
+
+ stats = (RepositoryStatistics) data.get( 0 );
+ assertEquals( REPO, stats.getRepositoryId() );
+ assertEquals( 200, stats.getArtifactCount() );
+ assertEquals( 5, stats.getJarCount() );
+ assertEquals( 5, stats.getWarCount() );
+ assertEquals( 5, stats.getPluginCount() );
+ assertEquals( toDate( 2008, 1, 1, 0, 0, 0 ).getTime(), stats.getDateOfScan().getTime() );
+ }
+
+ public void testSimpleReportWithoutPagination()
+ throws Exception
+ {
+ Date startDate = toDate( 2008, 1, 1, 0, 0, 0 );
+ Date endDate = toDate( 2008, 11, 30, 0, 0, 0 );
+
+ List<ArchivaArtifact> jarArtifacts = createArtifacts( RepositoryStatisticsReportGenerator.JAR_TYPE );
+ List<ArchivaArtifact> warArtifacts = createArtifacts( RepositoryStatisticsReportGenerator.WAR_TYPE );
+ List<ArchivaArtifact> mavenPlugins = createArtifacts( RepositoryStatisticsReportGenerator.MAVEN_PLUGIN );
+
+ List<RepositoryContentStatistics> repoContentStats = createStatisticsHistoryForSingleRepositoryTest( REPO );
+
+ // get first page
+ daoControl.expectAndReturn( dao.getArtifactDAO(), artifactDao );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.JAR_TYPE, endDate, "whenGathered") ), jarArtifacts, 21 );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.WAR_TYPE, endDate, "whenGathered") ), warArtifacts, 21 );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.MAVEN_PLUGIN, endDate, "whenGathered") ), mavenPlugins, 21 );
+
+ daoControl.replay();
+ artifactDaoControl.replay();
+
+ List<RepositoryStatistics> data = generator.generateReport( repoContentStats, REPO, startDate, endDate );
+
+ daoControl.verify();
+ artifactDaoControl.verify();
+
+ assertEquals( 21, data.size() );
+
+ RepositoryStatistics stats = (RepositoryStatistics) data.get( 0 );
+ assertEquals( REPO, stats.getRepositoryId() );
+ assertEquals( 200, stats.getArtifactCount() );
+ assertEquals( 5, stats.getJarCount() );
+ assertEquals( 5, stats.getWarCount() );
+ assertEquals( 5, stats.getPluginCount() );
+ assertEquals( toDate( 2008, 11, 1, 0, 0, 0 ).getTime(), stats.getDateOfScan().getTime() );
+ assertEquals( toDate( 2008, 1, 1, 0, 0, 0 ).getTime(), ( (RepositoryStatistics) data.get( 20 ) ).getDateOfScan().getTime() );
+ }
+
+ public void testSimpleReportNoArtifactCountStatisticsAvailable()
+ throws Exception
+ {
+ Date startDate = toDate( 2008, 1, 1, 0, 0, 0 );
+ Date endDate = toDate( 2008, 11, 30, 0, 0, 0 );
+
+ DataLimits limits = new DataLimits();
+ limits.setPerPageCount( 5 );
+ limits.setCurrentPage( 1 );
+ limits.setCountOfPages( 5 );
+ limits.setTotalCount( 21 );
+
+ List<ArchivaArtifact> jarArtifacts = new ArrayList<ArchivaArtifact>();
+ List<ArchivaArtifact> warArtifacts = new ArrayList<ArchivaArtifact>();
+ List<ArchivaArtifact> mavenPlugins = new ArrayList<ArchivaArtifact>();
+
+ List<RepositoryContentStatistics> repoContentStats = createStatisticsHistoryForSingleRepositoryTest( REPO );
+
+ daoControl.expectAndReturn( dao.getArtifactDAO(), artifactDao );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.JAR_TYPE, endDate, "whenGathered") ), jarArtifacts, 5 );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.WAR_TYPE, endDate, "whenGathered") ), warArtifacts, 5 );
+
+ artifactDaoControl.expectAndReturn( artifactDao.queryArtifacts(
+ new ArtifactsByRepositoryConstraint( REPO, RepositoryStatisticsReportGenerator.MAVEN_PLUGIN, endDate, "whenGathered") ), mavenPlugins, 5 );
+
+ daoControl.replay();
+ artifactDaoControl.replay();
+
+ List<RepositoryStatistics> data = generator.generateReport( repoContentStats, REPO, startDate, endDate, limits );
+
+ daoControl.verify();
+ artifactDaoControl.verify();
+
+ assertEquals( 5, data.size() );
+
+ RepositoryStatistics stats = (RepositoryStatistics) data.get( 0 );
+ assertEquals( REPO, stats.getRepositoryId() );
+ assertEquals( 200, stats.getArtifactCount() );
+ assertEquals( 0, stats.getJarCount() );
+ assertEquals( 0, stats.getWarCount() );
+ assertEquals( 0, stats.getPluginCount() );
+ assertEquals( toDate( 2008, 11, 1, 0, 0, 0 ).getTime(), stats.getDateOfScan().getTime() );
+ assertEquals( toDate( 2008, 9, 1, 0, 0, 0 ).getTime(), ( (RepositoryStatistics) data.get( 4 ) ).getDateOfScan().getTime() );
+ // no results found when ArtifactDAO was queried
+ }
+
+ public void testSimpleReportWithPaginationInvalidRequestedPage()
+ throws Exception
+ {
+ Date startDate = toDate( 2008, 1, 1, 0, 0, 0 );
+ Date endDate = toDate( 2008, 11, 30, 0, 0, 0 );
+
+ DataLimits limits = new DataLimits();
+ limits.setPerPageCount( 5 );
+ limits.setCurrentPage( 10 );
+ limits.setCountOfPages( 5 );
+ limits.setTotalCount( 21 );
+
+ List<RepositoryContentStatistics> repoContentStats = createStatisticsHistoryForSingleRepositoryTest( REPO );
+
+ try
+ {
+ List<RepositoryStatistics> data = generator.generateReport( repoContentStats, REPO, startDate, endDate, limits );
+ fail( "An ArchivaReportException should have been thrown." );
+ }
+ catch ( ArchivaReportException a )
+ {
+
+ }
+ // requested page exceeds total number of pages
+ }
+}
*/
import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.apache.maven.archiva.database.ObjectNotFoundException;
+import org.apache.maven.archiva.database.constraints.ArtifactsByRepositoryConstraint;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
+import org.apache.maven.archiva.database.constraints.UniqueArtifactIdConstraint;
+import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.util.ArrayList;
import java.util.List;
/**
log.info( "Finished repository task: " + stats.toDump( arepo ) );
- // I hate jpox and modello
- RepositoryContentStatistics dbstats = new RepositoryContentStatistics();
- dbstats.setDuration( stats.getDuration() );
- dbstats.setNewFileCount( stats.getNewFileCount() );
- dbstats.setRepositoryId( stats.getRepositoryId() );
- dbstats.setTotalFileCount( stats.getTotalFileCount() );
- dbstats.setWhenGathered( stats.getWhenGathered() );
+ RepositoryContentStatistics dbstats = constructRepositoryStatistics( arepo, sinceWhen, results, stats );
- dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
+ dao.getRepositoryContentStatisticsDAO().saveRepositoryContentStatistics( dbstats );
}
catch ( RepositoryException e )
- {
+ {
throw new TaskExecutionException( "Repository error when executing repository job.", e );
- }
+ }
}
+
+ private RepositoryContentStatistics constructRepositoryStatistics( ManagedRepositoryConfiguration arepo,
+ long sinceWhen,
+ List<RepositoryContentStatistics> results,
+ RepositoryScanStatistics stats )
+ {
+        // Copy field-by-field: the JPOX/Modello-generated model classes require manual mapping.
+ RepositoryContentStatistics dbstats = new RepositoryContentStatistics();
+ dbstats.setDuration( stats.getDuration() );
+ dbstats.setNewFileCount( stats.getNewFileCount() );
+ dbstats.setRepositoryId( stats.getRepositoryId() );
+ dbstats.setTotalFileCount( stats.getTotalFileCount() );
+ dbstats.setWhenGathered( stats.getWhenGathered() );
+
+ // total artifact count
+ try
+ {
+ List artifacts = dao.getArtifactDAO().queryArtifacts(
+ new ArtifactsByRepositoryConstraint( arepo.getId(), stats.getWhenGathered(), "groupId", true ) );
+ dbstats.setTotalArtifactCount( artifacts.size() );
+ }
+ catch ( ObjectNotFoundException oe )
+ {
+ log.error( "Object not found in the database : " + oe.getMessage() );
+ }
+ catch ( ArchivaDatabaseException ae )
+ {
+ log.error( "Error occurred while querying artifacts for artifact count : " + ae.getMessage() );
+ }
+
+ // total repo size
+ long size = FileUtils.sizeOfDirectory( new File( arepo.getLocation() ) );
+ dbstats.setTotalSize( size );
+
+ // total unique groups
+ List<String> repos = new ArrayList<String>();
+ repos.add( arepo.getId() );
+
+ List<String> groupIds = dao.query( new UniqueGroupIdConstraint( repos ) );
+ dbstats.setTotalGroupCount( groupIds.size() );
+
+ List<Object[]> artifactIds = dao.query( new UniqueArtifactIdConstraint( arepo.getId(), true ) );
+ dbstats.setTotalProjectCount( artifactIds.size() );
+
+ return dbstats;
+ }
}
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId>
</dependency>
+ <!-- TODO: replace with metadata processor -->
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-database</artifactId>
Calendar greaterThanThisDate = Calendar.getInstance( DateUtils.UTC_TIME_ZONE );
greaterThanThisDate.add( Calendar.DATE, -( getNumberOfDaysBeforeNow() ) );
- Constraint artifactsByRepo = new ArtifactsByRepositoryConstraint( repoId, greaterThanThisDate.getTime(), "whenGathered" );
+ Constraint artifactsByRepo = new ArtifactsByRepositoryConstraint( repoId, greaterThanThisDate.getTime(), "whenGathered", false );
List<ArchivaArtifact> artifacts = artifactDAO.queryArtifacts( artifactsByRepo );
List<RssFeedEntry> entries = processData( artifacts, true );
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-system</artifactId>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-rbac-model</artifactId>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-rbac-role-manager</artifactId>
</dependency>
<!-- Test Scoped -->
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-authorization-rbac</artifactId>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-keys-memory</artifactId>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-users-memory</artifactId>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-rbac-memory</artifactId>
<scope>test</scope>
</dependency>
return true;
}
- public boolean isAuthorizedToAccessVirtualRepository( String principal, String repoId )
+ public boolean isAuthorized( String principal, String repoId, boolean isWriteRequest )
throws UnauthorizedException
{
try
{
+ String permission = ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS;
+
+ if ( isWriteRequest )
+ {
+ permission = ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD;
+ }
+
User user = securitySystem.getUserManager().findUser( principal );
if ( user.isLocked() )
{
AuthenticationResult authn = new AuthenticationResult( true, principal, null );
SecuritySession securitySession = new DefaultSecuritySession( authn, user );
- return securitySystem.isAuthorized( securitySession, ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS,
- repoId );
+ return securitySystem.isAuthorized( securitySession, permission, repoId );
}
catch ( UserNotFoundException e )
{
* under the License.
*/
-import java.util.HashMap;
import java.util.Map;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.codehaus.plexus.redback.system.SecuritySession;
import org.codehaus.plexus.redback.system.SecuritySystemConstants;
import org.codehaus.plexus.redback.users.User;
+import org.codehaus.plexus.registry.Registry;
/**
* ArchivaXworkUser
*
* @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
+ *
+ * @plexus.component role="org.apache.maven.archiva.security.ArchivaXworkUser"
*/
public class ArchivaXworkUser
{
- public static String getActivePrincipal( Map<String, Object> sessionMap )
- {
+ /**
+ * @plexus.requirement role-hint="commons-configuration"
+ */
+ private Registry registry;
+
+ private static final String KEY = "org.codehaus.plexus.redback";
+
+ private static String guest;
+
+ public String getActivePrincipal( Map<String, Object> sessionMap )
+ {
if ( sessionMap == null )
{
- return ArchivaRoleConstants.PRINCIPAL_GUEST;
+ return getGuest();
}
SecuritySession securitySession =
if ( securitySession == null )
{
- return ArchivaRoleConstants.PRINCIPAL_GUEST;
+ return getGuest();
}
User user = securitySession.getUser();
if ( user == null )
{
- return ArchivaRoleConstants.PRINCIPAL_GUEST;
+ return getGuest();
}
return (String) user.getPrincipal();
+ }
+
+ public String getGuest()
+ {
+ if( guest == null || "".equals( guest ) )
+ {
+ Registry subset = registry.getSubset( KEY );
+ guest = subset.getString( "redback.default.guest", ArchivaRoleConstants.PRINCIPAL_GUEST );
+ }
+
+ return guest;
+ }
+
+ public void setGuest( String guesT )
+ {
+ guest = guesT;
}
}
*/
public interface ServletAuthenticator
{
+ /**
+ * Authentication check for users.
+ *
+ * @param request
+ * @param result
+     * @return true if the request is successfully authenticated, false otherwise
+ * @throws AuthenticationException
+ * @throws AccountLockedException
+ * @throws MustChangePasswordException
+ */
public boolean isAuthenticated( HttpServletRequest request, AuthenticationResult result )
throws AuthenticationException, AccountLockedException, MustChangePasswordException;
+ /**
+ * Authorization check for valid users.
+ *
+ * @param request
+ * @param securitySession
+ * @param repositoryId
+ * @param isWriteRequest
+     * @return true if the user in the security session is authorized to access the repository, false otherwise
+ * @throws AuthorizationException
+ * @throws UnauthorizedException
+ */
public boolean isAuthorized( HttpServletRequest request, SecuritySession securitySession, String repositoryId,
boolean isWriteRequest ) throws AuthorizationException, UnauthorizedException;
- public boolean isAuthorizedToAccessVirtualRepository( String principal, String repoId )
+ /**
+ * Authorization check specific for user guest, which doesn't go through
+ * HttpBasicAuthentication#getAuthenticationResult( HttpServletRequest request, HttpServletResponse response )
+ * since no credentials are attached to the request.
+ *
+ * See also MRM-911
+ *
+ * @param principal
+ * @param repoId
+ * @param isWriteRequest
+     * @return true if the given principal is authorized to access the repository, false otherwise
+ * @throws UnauthorizedException
+ */
+ public boolean isAuthorized( String principal, String repoId, boolean isWriteRequest )
throws UnauthorizedException;
}
--- /dev/null
+package org.apache.maven.archiva.security;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.codehaus.plexus.redback.rbac.RBACManager;
+import org.codehaus.plexus.redback.role.RoleManager;
+import org.codehaus.plexus.redback.system.SecuritySystem;
+import org.codehaus.plexus.redback.users.User;
+import org.codehaus.plexus.redback.users.UserManager;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+
+/**
+ * AbstractSecurityTest
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version $Id$
+ */
+public abstract class AbstractSecurityTest
+ extends PlexusInSpringTestCase
+{
+ protected static final String USER_GUEST = "guest";
+
+ protected static final String USER_ADMIN = "admin";
+
+ protected static final String USER_ALPACA = "alpaca";
+
+ protected SecuritySystem securitySystem;
+
+ private RBACManager rbacManager;
+
+ protected RoleManager roleManager;
+
+ private ArchivaConfiguration archivaConfiguration;
+
+ protected UserRepositories userRepos;
+
+ protected void setupRepository( String repoId )
+ throws Exception
+ {
+ // Add repo to configuration.
+ ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
+ repoConfig.setId( repoId );
+ repoConfig.setName( "Testable repo <" + repoId + ">" );
+ repoConfig.setLocation( getTestPath( "target/test-repo/" + repoId ) );
+ archivaConfiguration.getConfiguration().addManagedRepository( repoConfig );
+
+ // Add repo roles to security.
+ userRepos.createMissingRepositoryRoles( repoId );
+ }
+
+ protected void assignRepositoryObserverRole( String principal, String repoId )
+ throws Exception
+ {
+ roleManager.assignTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, repoId, principal );
+ }
+
+ protected User createUser( String principal, String fullname )
+ {
+ UserManager userManager = securitySystem.getUserManager();
+
+ User user = userManager.createUser( principal, fullname, principal + "@testable.archiva.apache.org" );
+ securitySystem.getPolicy().setEnabled( false );
+ userManager.addUser( user );
+ securitySystem.getPolicy().setEnabled( true );
+
+ return user;
+ }
+
+ @Override
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ File srcConfig = getTestFile( "src/test/resources/repository-archiva.xml" );
+ File destConfig = getTestFile( "target/test-conf/archiva.xml" );
+
+ destConfig.getParentFile().mkdirs();
+ destConfig.delete();
+
+ FileUtils.copyFile( srcConfig, destConfig );
+
+ securitySystem = (SecuritySystem) lookup( SecuritySystem.class, "testable" );
+ rbacManager = (RBACManager) lookup( RBACManager.class, "memory" );
+ roleManager = (RoleManager) lookup( RoleManager.class, "default" );
+ userRepos = (UserRepositories) lookup( UserRepositories.class, "default" );
+ archivaConfiguration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
+
+ // Some basic asserts.
+ assertNotNull( securitySystem );
+ assertNotNull( rbacManager );
+ assertNotNull( roleManager );
+ assertNotNull( userRepos );
+ assertNotNull( archivaConfiguration );
+
+ // Setup Admin User.
+ User adminUser = createUser( USER_ADMIN, "Admin User" );
+ roleManager.assignRole( ArchivaRoleConstants.TEMPLATE_SYSTEM_ADMIN, adminUser.getPrincipal().toString() );
+
+ // Setup Guest User.
+ User guestUser = createUser( USER_GUEST, "Guest User" );
+ roleManager.assignRole( ArchivaRoleConstants.TEMPLATE_GUEST, guestUser.getPrincipal().toString() );
+ }
+}
--- /dev/null
+package org.apache.maven.archiva.security;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.codehaus.plexus.redback.authentication.AuthenticationException;
+import org.codehaus.plexus.redback.authentication.AuthenticationResult;
+import org.codehaus.plexus.redback.authorization.UnauthorizedException;
+import org.codehaus.plexus.redback.system.DefaultSecuritySession;
+import org.codehaus.plexus.redback.system.SecuritySession;
+import org.codehaus.plexus.redback.users.User;
+import org.codehaus.plexus.redback.users.UserManager;
+
+import org.easymock.MockControl;
+
+/**
+ * ArchivaServletAuthenticatorTest
+ *
+ * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
+ * @version $Id$
+ */
+public class ArchivaServletAuthenticatorTest
+ extends AbstractSecurityTest
+{
+ private ServletAuthenticator servletAuth;
+
+ private MockControl httpServletRequestControl;
+
+ private HttpServletRequest request;
+
+ @Override
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ servletAuth = ( ServletAuthenticator ) lookup( ServletAuthenticator.class, "default" );
+
+ httpServletRequestControl = MockControl.createControl( HttpServletRequest.class );
+ request = ( HttpServletRequest ) httpServletRequestControl.getMock();
+
+ setupRepository( "corporate" );
+ }
+
+ @Override
+ protected String getPlexusConfigLocation()
+ {
+ return "org/apache/maven/archiva/security/ArchivaServletAuthenticatorTest.xml";
+ }
+
+ protected void assignRepositoryManagerRole( String principal, String repoId )
+ throws Exception
+ {
+ roleManager.assignTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, repoId, principal );
+ }
+
+ public void testIsAuthenticatedUserExists()
+ throws Exception
+ {
+ AuthenticationResult result = new AuthenticationResult( true, "user", null );
+ boolean isAuthenticated = servletAuth.isAuthenticated( request, result );
+
+ assertTrue( isAuthenticated );
+ }
+
+ public void testIsAuthenticatedUserDoesNotExist()
+ throws Exception
+ {
+ AuthenticationResult result = new AuthenticationResult( false, "non-existing-user", null );
+ try
+ {
+ servletAuth.isAuthenticated( request, result );
+ fail( "Authentication exception should have been thrown." );
+ }
+ catch ( AuthenticationException e )
+ {
+ assertEquals( "User Credentials Invalid", e.getMessage() );
+ }
+ }
+
+ public void testIsAuthorizedUserHasWriteAccess()
+ throws Exception
+ {
+ createUser( USER_ALPACA, "Al 'Archiva' Paca" );
+
+ assignRepositoryManagerRole( USER_ALPACA, "corporate" );
+
+ UserManager userManager = securitySystem.getUserManager();
+ User user = userManager.findUser( USER_ALPACA );
+
+ AuthenticationResult result = new AuthenticationResult( true, USER_ALPACA, null );
+
+ SecuritySession session = new DefaultSecuritySession( result, user );
+ boolean isAuthorized = servletAuth.isAuthorized( request, session, "corporate", true );
+
+ assertTrue( isAuthorized );
+ }
+
+ public void testIsAuthorizedUserHasNoWriteAccess()
+ throws Exception
+ {
+ createUser( USER_ALPACA, "Al 'Archiva' Paca" );
+
+ assignRepositoryObserverRole( USER_ALPACA, "corporate" );
+
+ httpServletRequestControl.expectAndReturn( request.getRemoteAddr(), "192.168.111.111" );
+
+ UserManager userManager = securitySystem.getUserManager();
+ User user = userManager.findUser( USER_ALPACA );
+
+ AuthenticationResult result = new AuthenticationResult( true, USER_ALPACA, null );
+
+ SecuritySession session = new DefaultSecuritySession( result, user );
+
+ httpServletRequestControl.replay();
+
+ try
+ {
+ servletAuth.isAuthorized( request, session, "corporate", true );
+ fail( "UnauthorizedException should have been thrown." );
+ }
+ catch ( UnauthorizedException e )
+ {
+ assertEquals( "Access denied for repository corporate", e.getMessage() );
+ }
+
+ httpServletRequestControl.verify();
+ }
+
+
+ public void testIsAuthorizedUserHasReadAccess()
+ throws Exception
+ {
+ createUser( USER_ALPACA, "Al 'Archiva' Paca" );
+
+ assignRepositoryObserverRole( USER_ALPACA, "corporate" );
+
+ UserManager userManager = securitySystem.getUserManager();
+ User user = userManager.findUser( USER_ALPACA );
+
+ AuthenticationResult result = new AuthenticationResult( true, USER_ALPACA, null );
+
+ SecuritySession session = new DefaultSecuritySession( result, user );
+ boolean isAuthorized = servletAuth.isAuthorized( request, session, "corporate", false );
+
+ assertTrue( isAuthorized );
+ }
+
+ public void testIsAuthorizedUserHasNoReadAccess()
+ throws Exception
+ {
+ createUser( USER_ALPACA, "Al 'Archiva' Paca" );
+
+ UserManager userManager = securitySystem.getUserManager();
+ User user = userManager.findUser( USER_ALPACA );
+
+ AuthenticationResult result = new AuthenticationResult( true, USER_ALPACA, null );
+
+ SecuritySession session = new DefaultSecuritySession( result, user );
+ try
+ {
+ servletAuth.isAuthorized( request, session, "corporate", false );
+ fail( "UnauthorizedException should have been thrown." );
+ }
+ catch ( UnauthorizedException e )
+ {
+ assertEquals( "Access denied for repository corporate", e.getMessage() );
+ }
+ }
+
+ public void testIsAuthorizedGuestUserHasWriteAccess()
+ throws Exception
+ {
+ assignRepositoryManagerRole( USER_GUEST, "corporate" );
+ boolean isAuthorized = servletAuth.isAuthorized( USER_GUEST, "corporate", true );
+
+ assertTrue( isAuthorized );
+ }
+
+ public void testIsAuthorizedGuestUserHasNoWriteAccess()
+ throws Exception
+ {
+ assignRepositoryObserverRole( USER_GUEST, "corporate" );
+
+ boolean isAuthorized = servletAuth.isAuthorized( USER_GUEST, "corporate", true );
+ assertFalse( isAuthorized );
+ }
+
+ public void testIsAuthorizedGuestUserHasReadAccess()
+ throws Exception
+ {
+ assignRepositoryObserverRole( USER_GUEST, "corporate" );
+
+ boolean isAuthorized = servletAuth.isAuthorized( USER_GUEST, "corporate", false );
+
+ assertTrue( isAuthorized );
+ }
+
+ public void testIsAuthorizedGuestUserHasNoReadAccess()
+ throws Exception
+ {
+ boolean isAuthorized = servletAuth.isAuthorized( USER_GUEST, "corporate", false );
+
+ assertFalse( isAuthorized );
+ }
+}
* under the License.
*/
-import java.io.File;
import java.util.List;
-import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-import org.codehaus.plexus.redback.rbac.RBACManager;
-import org.codehaus.plexus.redback.role.RoleManager;
-import org.codehaus.plexus.redback.system.SecuritySystem;
-import org.codehaus.plexus.redback.users.User;
-import org.codehaus.plexus.redback.users.UserManager;
/**
* DefaultUserRepositoriesTest
* @version $Id$
*/
public class DefaultUserRepositoriesTest
- extends PlexusInSpringTestCase
-{
- private static final String USER_GUEST = "guest";
-
- private static final String USER_ADMIN = "admin";
-
- private static final String USER_ALPACA = "alpaca";
-
- private SecuritySystem securitySystem;
-
- private RBACManager rbacManager;
-
- private RoleManager roleManager;
-
- private ArchivaConfiguration archivaConfiguration;
-
- private UserRepositories userRepos;
-
+ extends AbstractSecurityTest
+{
+ @Override
+ protected String getPlexusConfigLocation()
+ {
+ return "org/apache/maven/archiva/security/DefaultUserRepositoriesTest.xml";
+ }
+
public void testGetObservableRepositoryIds()
throws Exception
{
}
}
- private void setupRepository( String repoId )
- throws Exception
- {
- // Add repo to configuration.
- ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
- repoConfig.setId( repoId );
- repoConfig.setName( "Testable repo <" + repoId + ">" );
- repoConfig.setLocation( getTestPath( "target/test-repo/" + repoId ) );
- archivaConfiguration.getConfiguration().addManagedRepository( repoConfig );
-
- // Add repo roles to security.
- userRepos.createMissingRepositoryRoles( repoId );
- }
-
private void assignGlobalRepositoryObserverRole( String principal )
throws Exception
{
roleManager.assignRole( ArchivaRoleConstants.TEMPLATE_GLOBAL_REPOSITORY_OBSERVER, principal );
}
-
- private void assignRepositoryObserverRole( String principal, String repoId )
- throws Exception
- {
- roleManager.assignTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, repoId, principal );
- }
-
- private User createUser( String principal, String fullname )
- {
- UserManager userManager = securitySystem.getUserManager();
-
- User user = userManager.createUser( principal, fullname, principal + "@testable.archiva.apache.org" );
- securitySystem.getPolicy().setEnabled( false );
- userManager.addUser( user );
- securitySystem.getPolicy().setEnabled( true );
-
- return user;
- }
-
- @Override
- protected void setUp()
- throws Exception
- {
- super.setUp();
-
- File srcConfig = getTestFile( "src/test/resources/repository-archiva.xml" );
- File destConfig = getTestFile( "target/test-conf/archiva.xml" );
-
- destConfig.getParentFile().mkdirs();
- destConfig.delete();
-
- FileUtils.copyFile( srcConfig, destConfig );
-
- securitySystem = (SecuritySystem) lookup( SecuritySystem.class, "testable" );
- rbacManager = (RBACManager) lookup( RBACManager.class, "memory" );
- roleManager = (RoleManager) lookup( RoleManager.class, "default" );
- userRepos = (UserRepositories) lookup( UserRepositories.class, "default" );
- archivaConfiguration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
-
- // Some basic asserts.
- assertNotNull( securitySystem );
- assertNotNull( rbacManager );
- assertNotNull( roleManager );
- assertNotNull( userRepos );
- assertNotNull( archivaConfiguration );
-
- // Setup Admin User.
- User adminUser = createUser( USER_ADMIN, "Admin User" );
- roleManager.assignRole( ArchivaRoleConstants.TEMPLATE_SYSTEM_ADMIN, adminUser.getPrincipal().toString() );
-
- // Setup Guest User.
- User guestUser = createUser( USER_GUEST, "Guest User" );
- roleManager.assignRole( ArchivaRoleConstants.TEMPLATE_GUEST, guestUser.getPrincipal().toString() );
-
- }
}
--- /dev/null
+<?xml version="1.0" ?>
+<component-set>
+ <components>
+
+ <component>
+ <role>org.apache.maven.archiva.security.ServletAuthenticator</role>
+ <role-hint>default</role-hint>
+ <implementation>org.apache.maven.archiva.security.ArchivaServletAuthenticator</implementation>
+ <description>ArchivaServletAuthenticator</description>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.redback.system.SecuritySystem</role>
+ <role-hint>testable</role-hint>
+ <field-name>securitySystem</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.apache.maven.archiva.security.UserRepositories</role>
+ <role-hint>default</role-hint>
+ <implementation>org.apache.maven.archiva.security.DefaultUserRepositories</implementation>
+ <description>DefaultUserRepositories</description>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.redback.system.SecuritySystem</role>
+ <role-hint>testable</role-hint>
+ <field-name>securitySystem</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>rbacManager</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.role.RoleManager</role>
+ <role-hint>default</role-hint>
+ <field-name>roleManager</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <field-name>archivaConfiguration</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.codehaus.plexus.redback.system.SecuritySystem</role>
+ <role-hint>testable</role-hint>
+ <implementation>org.codehaus.plexus.redback.system.DefaultSecuritySystem</implementation>
+ <description>DefaultSecuritySystem:</description>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.redback.authentication.AuthenticationManager</role>
+ <field-name>authnManager</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.authorization.Authorizer</role>
+ <role-hint>rbac</role-hint>
+ <field-name>authorizer</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.users.UserManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>userManager</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.keys.KeyManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>keyManager</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.policy.UserSecurityPolicy</role>
+ <field-name>policy</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.codehaus.plexus.redback.authorization.Authorizer</role>
+ <role-hint>rbac</role-hint>
+ <implementation>org.codehaus.plexus.redback.authorization.rbac.RbacAuthorizer</implementation>
+ <description>RbacAuthorizer:</description>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>manager</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.users.UserManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>userManager</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.authorization.rbac.evaluator.PermissionEvaluator</role>
+ <role-hint>default</role-hint>
+ <field-name>evaluator</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.configuration.UserConfiguration</role>
+ <role-hint>default</role-hint>
+ <field-name>config</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.codehaus.plexus.redback.authorization.rbac.evaluator.PermissionEvaluator</role>
+ <role-hint>default</role-hint>
+ <implementation>org.codehaus.plexus.redback.authorization.rbac.evaluator.DefaultPermissionEvaluator</implementation>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.redback.users.UserManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>userManager</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.codehaus.plexus.redback.role.RoleManager</role>
+ <role-hint>default</role-hint>
+ <implementation>org.codehaus.plexus.redback.role.DefaultRoleManager</implementation>
+ <description>RoleProfileManager:</description>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.redback.role.validator.RoleModelValidator</role>
+ <role-hint>default</role-hint>
+ <field-name>modelValidator</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.role.processor.RoleModelProcessor</role>
+ <role-hint>default</role-hint>
+ <field-name>modelProcessor</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.role.template.RoleTemplateProcessor</role>
+ <role-hint>default</role-hint>
+ <field-name>templateProcessor</field-name>
+ </requirement>
+ <requirement>
+ <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>rbacManager</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.codehaus.plexus.redback.role.processor.RoleModelProcessor</role>
+ <role-hint>default</role-hint>
+ <implementation>org.codehaus.plexus.redback.role.processor.DefaultRoleModelProcessor</implementation>
+ <description>DefaultRoleModelProcessor: inserts the components of the model that can be populated into the rbac manager</description>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>rbacManager</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.codehaus.plexus.redback.role.template.RoleTemplateProcessor</role>
+ <role-hint>default</role-hint>
+ <implementation>org.codehaus.plexus.redback.role.template.DefaultRoleTemplateProcessor</implementation>
+ <description>DefaultRoleTemplateProcessor: inserts the components of a template into the rbac manager</description>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
+ <role-hint>memory</role-hint>
+ <field-name>rbacManager</field-name>
+ </requirement>
+ </requirements>
+ </component>
+
+ <component>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
+ <requirements>
+ <requirement>
+ <role>org.codehaus.plexus.registry.Registry</role>
+ <role-hint>configured</role-hint>
+ </requirement>
+ </requirements>
+ </component>
+ <component>
+ <role>org.codehaus.plexus.registry.Registry</role>
+ <role-hint>configured</role-hint>
+ <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
+ <configuration>
+ <properties>
+ <system/>
+ <xml fileName="${basedir}/target/test-conf/archiva.xml"
+ config-name="org.apache.maven.archiva.base" config-at="org.apache.maven.archiva"/>
+ </properties>
+ </configuration>
+ </component>
+
+ </components>
+</component-set>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-rss</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-xmlrpc-services</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-xmlrpc-security</artifactId>
- </dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</dependency>
<!-- Plexus Security Dependencies -->
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-xwork-content</artifactId>
<type>war</type>
<scope>runtime</scope>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-xwork-integration</artifactId>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-taglib</artifactId>
</dependency>
<!-- Other dependencies -->
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.derby</groupId>
- <artifactId>derby</artifactId>
- <scope>provided</scope>
- </dependency>
<dependency>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>
<artifactId>commons-logging-api</artifactId>
<version>1.1</version>
</dependency>
- <dependency>
- <groupId>com.atlassian.xmlrpc</groupId>
- <artifactId>atlassian-xmlrpc-binder-server-spring</artifactId>
- <scope>runtime</scope>
- </dependency>
</dependencies>
<build>
<resources>
*/
private UserRepositories userRepositories;
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaXworkUser archivaXworkUser;
+
private BrowsingResults results;
private String groupId;
private String getPrincipal()
{
- return ArchivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
+ return archivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
}
private List<String> getObservableRepos()
package org.apache.maven.archiva.web.action;
-
+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
-import com.opensymphony.xwork.ActionContext;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.constraints.ArtifactsByChecksumConstraint;
import org.apache.maven.archiva.security.UserRepositories;
import org.codehaus.plexus.xwork.action.PlexusActionSupport;
+import com.opensymphony.xwork.ActionContext;
+import com.opensymphony.xwork.Preparable;
+
/**
* Search all indexed fields by the given criteria.
*
*/
public class SearchAction
extends PlexusActionSupport
-{
+ implements Preparable
+{
/**
* Query string.
*/
+
+ private ArchivaConfiguration archivaConfiguration;
+
+ private Map<String, ManagedRepositoryConfiguration> managedRepositories;
+
private String q;
/**
*/
private UserRepositories userRepositories;
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaXworkUser archivaXworkUser;
+
private static final String RESULTS = "results";
private static final String ARTIFACT = "artifact";
private int totalPages;
- private boolean searchResultsOnly;
+ private boolean searchResultsOnly;
private String completeQueryString;
private static final String COMPLETE_QUERY_STRING_SEPARATOR = ";";
-
+
private static final String BYTECODE_KEYWORD = "bytecode:";
+ private List<String> managedRepositoryList;
+
+ private String groupId;
+
+ private String artifactId;
+
+ private String version;
+
+ private String className;
+
+ private int rowCount = 30;
+
+ private String repositoryId;
+
+ private boolean fromFilterSearch;
+
+ private boolean filterSearch = false;
+
+ private boolean fromResultsPage;
+
+ private int num;
+
+ public boolean isFromResultsPage()
+ {
+ return fromResultsPage;
+ }
+
+ public void setFromResultsPage( boolean fromResultsPage )
+ {
+ this.fromResultsPage = fromResultsPage;
+ }
+
+ public boolean isFromFilterSearch()
+ {
+ return fromFilterSearch;
+ }
+
+ public void setFromFilterSearch( boolean fromFilterSearch )
+ {
+ this.fromFilterSearch = fromFilterSearch;
+ }
+
+ public void prepare()
+ {
+ managedRepositoryList = new ArrayList<String>();
+ managedRepositoryList = getObservableRepos();
+
+ if ( managedRepositoryList.size() > 0 )
+ {
+ managedRepositoryList.add( "all" );
+ }
+ }
+
+ // advanced search MRM-90 -- filtered search
+ public String filteredSearch()
+ throws MalformedURLException, RepositoryIndexException, RepositoryIndexSearchException
+ {
+ fromFilterSearch = true;
+
+ if ( CollectionUtils.isEmpty( managedRepositoryList ) )
+ {
+ return GlobalResults.ACCESS_TO_NO_REPOS;
+ }
+
+ SearchResultLimits limits = new SearchResultLimits( currentPage );
+
+ limits.setPageSize( rowCount );
+ List<String> selectedRepos = new ArrayList<String>();
+
+        if ( "all".equals( repositoryId ) )
+ {
+ selectedRepos = getObservableRepos();
+ }
+ else
+ {
+ selectedRepos.add( repositoryId );
+ }
+
+ if ( CollectionUtils.isEmpty( selectedRepos ) )
+ {
+ return GlobalResults.ACCESS_TO_NO_REPOS;
+ }
+
+ results =
+ crossRepoSearch.executeFilteredSearch( getPrincipal(), selectedRepos, groupId, artifactId, version,
+ className, limits );
+
+ if ( results.isEmpty() )
+ {
+ addActionError( "No results found" );
+ return INPUT;
+ }
+
+ totalPages = results.getTotalHits() / limits.getPageSize();
+
+ if ( ( results.getTotalHits() % limits.getPageSize() ) != 0 )
+ {
+ totalPages = totalPages + 1;
+ }
+
+ return SUCCESS;
+ }
+
public String quickSearch()
throws MalformedURLException, RepositoryIndexException, RepositoryIndexSearchException
{
/* TODO: give action message if indexing is in progress.
* This should be based off a count of 'unprocessed' artifacts.
- * This (yet to be written) routine could tell the user that X (unprocessed) artifacts are not yet
+ * This (yet to be written) routine could tell the user that X (unprocessed) artifacts are not yet
* present in the full text search.
*/
assert q != null && q.length() != 0;
-
+
+ fromFilterSearch = false;
+
SearchResultLimits limits = new SearchResultLimits( currentPage );
-
+
List<String> selectedRepos = getObservableRepos();
if ( CollectionUtils.isEmpty( selectedRepos ) )
{
}
if( isBytecodeSearch( q ) )
- {
+ {
results = crossRepoSearch.searchForBytecode( getPrincipal(), selectedRepos, removeKeyword( q ), limits );
}
else
{
if( searchResultsOnly && !completeQueryString.equals( "" ) )
- {
+ {
results = crossRepoSearch.searchForTerm( getPrincipal(), selectedRepos, q, limits, parseCompleteQueryString() );
}
else
results = crossRepoSearch.searchForTerm( getPrincipal(), selectedRepos, q, limits );
}
}
-
+
if ( results.isEmpty() )
{
addActionError( "No results found" );
return INPUT;
}
-
+
totalPages = results.getTotalHits() / limits.getPageSize();
-
+
if( (results.getTotalHits() % limits.getPageSize()) != 0 )
{
totalPages = totalPages + 1;
}
// TODO: filter / combine the artifacts by version? (is that even possible with non-artifact hits?)
- /* I don't think that we should, as I expect us to utilize the 'score' system in lucene in
+ /* I don't think that we should, as I expect us to utilize the 'score' system in lucene in
* the future to return relevant links better.
* I expect the lucene scoring system to take multiple hits on different areas of a single document
- * to result in a higher score.
+ * to result in a higher score.
* - Joakim
*/
-
+
if( !isEqualToPreviousSearchTerm( q ) )
{
buildCompleteQueryString( q );
}
-
+
return SUCCESS;
}
if ( databaseResults.size() == 1 )
{
- // 1 hit? return it's information directly!
+            // 1 hit? return its information directly!
return ARTIFACT;
}
-
+
return RESULTS;
}
{
return INPUT;
}
-
+
private String getPrincipal()
{
- return ArchivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
+ return archivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
}
-
+
private List<String> getObservableRepos()
{
try
private void buildCompleteQueryString( String searchTerm )
{
- if( searchTerm.indexOf( COMPLETE_QUERY_STRING_SEPARATOR ) != -1 )
+ if ( searchTerm.indexOf( COMPLETE_QUERY_STRING_SEPARATOR ) != -1 )
{
searchTerm = StringUtils.remove( searchTerm, COMPLETE_QUERY_STRING_SEPARATOR );
}
-
- if( completeQueryString == null || "".equals( completeQueryString ) )
+
+ if ( completeQueryString == null || "".equals( completeQueryString ) )
{
completeQueryString = searchTerm;
}
else
- {
+ {
completeQueryString = completeQueryString + COMPLETE_QUERY_STRING_SEPARATOR + searchTerm;
}
}
-
+
private List<String> parseCompleteQueryString()
{
- List<String> parsedCompleteQueryString = new ArrayList<String>();
+ List<String> parsedCompleteQueryString = new ArrayList<String>();
String[] parsed = StringUtils.split( completeQueryString, COMPLETE_QUERY_STRING_SEPARATOR );
CollectionUtils.addAll( parsedCompleteQueryString, parsed );
-
+
return parsedCompleteQueryString;
}
-
+
private boolean isEqualToPreviousSearchTerm( String searchTerm )
{
- if( !"".equals( completeQueryString ) )
+ if ( !"".equals( completeQueryString ) )
{
String[] parsed = StringUtils.split( completeQueryString, COMPLETE_QUERY_STRING_SEPARATOR );
- if( StringUtils.equalsIgnoreCase( searchTerm, parsed[ parsed.length - 1 ] ) )
+ if ( StringUtils.equalsIgnoreCase( searchTerm, parsed[parsed.length - 1] ) )
{
return true;
}
}
-
+
return false;
}
-
+
public String getQ()
{
return q;
{
return databaseResults;
}
-
+
public void setCurrentPage( int page )
{
this.currentPage = page;
}
-
+
public int getCurrentPage()
{
return currentPage;
public void setCompleteQueryString( String completeQueryString )
{
this.completeQueryString = completeQueryString;
- }
-
+ }
+
private boolean isBytecodeSearch( String queryString )
{
- if( queryString.startsWith( BYTECODE_KEYWORD ) )
+ if ( queryString.startsWith( BYTECODE_KEYWORD ) )
{
- return true;
+ return true;
}
-
+
return false;
}
-
+
private String removeKeyword( String queryString )
- {
+ {
String qString = StringUtils.uncapitalize( queryString );
- qString= StringUtils.remove( queryString, BYTECODE_KEYWORD );
-
+        qString = StringUtils.remove( qString, BYTECODE_KEYWORD );
+
return qString;
}
+
+ public ArchivaConfiguration getArchivaConfiguration()
+ {
+ return archivaConfiguration;
+ }
+
+ public void setArchivaConfiguration( ArchivaConfiguration archivaConfiguration )
+ {
+ this.archivaConfiguration = archivaConfiguration;
+ }
+
+ public Map<String, ManagedRepositoryConfiguration> getManagedRepositories()
+ {
+ return getArchivaConfiguration().getConfiguration().getManagedRepositoriesAsMap();
+ }
+
+ public void setManagedRepositories( Map<String, ManagedRepositoryConfiguration> managedRepositories )
+ {
+ this.managedRepositories = managedRepositories;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public String getArtifactId()
+ {
+ return artifactId;
+ }
+
+ public void setArtifactId( String artifactId )
+ {
+ this.artifactId = artifactId;
+ }
+
+ public String getVersion()
+ {
+ return version;
+ }
+
+ public void setVersion( String version )
+ {
+ this.version = version;
+ }
+
+ public int getRowCount()
+ {
+ return rowCount;
+ }
+
+ public void setRowCount( int rowCount )
+ {
+ this.rowCount = rowCount;
+ }
+
+ public boolean isFilterSearch()
+ {
+ return filterSearch;
+ }
+
+ public void setFilterSearch( boolean filterSearch )
+ {
+ this.filterSearch = filterSearch;
+ }
+
+ public String getRepositoryId()
+ {
+ return repositoryId;
+ }
+
+ public void setRepositoryId( String repositoryId )
+ {
+ this.repositoryId = repositoryId;
+ }
+
+ public List<String> getManagedRepositoryList()
+ {
+ return managedRepositoryList;
+ }
+
+ public void setManagedRepositoryList( List<String> managedRepositoryList )
+ {
+ this.managedRepositoryList = managedRepositoryList;
+ }
+
+ public String getClassName()
+ {
+ return className;
+ }
+
+ public void setClassName( String className )
+ {
+ this.className = className;
+ }
}
* @plexus.requirement
*/
private UserRepositories userRepositories;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaXworkUser archivaXworkUser;
/* .\ Input Parameters \.________________________________________ */
private String getPrincipal()
{
- return ArchivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
+ return archivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
}
private List<String> getObservableRepos()
* @plexus.requirement
*/
private RepositoryContentConsumers consumers;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaXworkUser archivaXworkUser;
/**
* The groupId of the artifact to be deployed.
private String getPrincipal()
{
- return ArchivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
+ return archivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
}
private void copyFile( File sourceFile, File targetPath, String targetFilename )
{
private Map<String, AbstractRepositoryConfiguration> repoMap;
+ /**
+ * boolean to indicate that remote repo is present. Used for Add Link
+ */
+ private boolean remoteRepoExists=false;
+
/**
* Map of Proxy Connectors.
*/
repoMap.putAll( config.getManagedRepositoriesAsMap() );
proxyConnectorMap = createProxyConnectorMap();
+
+ remoteRepoExists=config.getRemoteRepositories().size()>0;
}
public Map<String, AbstractRepositoryConfiguration> getRepoMap()
{
return proxyConnectorMap;
}
+
+ public boolean getRemoteRepoExists()
+ {
+ return remoteRepoExists;
+ }
}
}
if ( !file.exists() || !file.isDirectory() )
{
- throw new IOException( "unable to add repository - can not create the root directory: " + file );
+            throw new IOException( "Unable to add repository - no write access, cannot create the root directory: " + file );
}
configuration.addManagedRepository( repository );
import com.opensymphony.webwork.interceptor.ServletRequestAware;
import com.opensymphony.xwork.Preparable;
+
+import org.apache.commons.lang.time.DateFormatUtils;
+import org.apache.commons.lang.time.DateUtils;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.Constraint;
+import org.apache.maven.archiva.database.ObjectNotFoundException;
+import org.apache.maven.archiva.database.RepositoryContentStatisticsDAO;
import org.apache.maven.archiva.database.constraints.RangeConstraint;
+import org.apache.maven.archiva.database.constraints.RepositoryContentStatisticsByRepositoryConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByGroupIdConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByRepositoryIdConstraint;
import org.apache.maven.archiva.database.constraints.RepositoryProblemConstraint;
import org.apache.maven.archiva.database.constraints.UniqueFieldConstraint;
+import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.model.RepositoryProblemReport;
+import org.apache.maven.archiva.reporting.ArchivaReportException;
+import org.apache.maven.archiva.reporting.DataLimits;
+import org.apache.maven.archiva.reporting.RepositoryStatistics;
+import org.apache.maven.archiva.reporting.RepositoryStatisticsReportGenerator;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.codehaus.plexus.redback.rbac.Resource;
import org.codehaus.plexus.redback.xwork.interceptor.SecureAction;
import org.codehaus.plexus.redback.xwork.interceptor.SecureActionBundle;
import org.codehaus.plexus.redback.xwork.interceptor.SecureActionException;
import org.codehaus.plexus.xwork.action.PlexusActionSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
+
+import java.text.ParseException;
import java.util.ArrayList;
+import java.util.Calendar;
import java.util.Collection;
+import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
extends PlexusActionSupport
implements SecureAction, ServletRequestAware, Preparable
{
+ private Logger log = LoggerFactory.getLogger( GenerateReportAction.class );
+
/**
* @plexus.requirement role-hint="jdo"
*/
protected ArchivaDAO dao;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaConfiguration archivaConfiguration;
protected Constraint constraint;
protected Map<String, List<RepositoryProblemReport>> repositoriesMap =
new TreeMap<String, List<RepositoryProblemReport>>();
-
+
+ // for statistics report
+ /**
+ * @plexus.requirement role-hint="simple"
+ */
+ private RepositoryStatisticsReportGenerator generator;
+
+ private List<String> selectedRepositories = new ArrayList<String>();
+
+ private List<String> availableRepositories;
+
+ private String startDate;
+
+ private String endDate;
+
+ private int reposSize;
+
+ private String selectedRepo;
+
+ private List<RepositoryStatistics> repositoryStatistics = new ArrayList<RepositoryStatistics>();
+
+ private DataLimits limits = new DataLimits();
+
+ private String[] datePatterns = new String[] { "MM/dd/yy", "MM/dd/yyyy", "MMMMM/dd/yyyy", "MMMMM/dd/yy",
+ "dd MMMMM yyyy", "dd/MM/yy", "dd/MM/yyyy", "yyyy/MM/dd" };
+
public void prepare()
{
repositoryIds = new ArrayList<String>();
repositoryIds.add( ALL_REPOSITORIES ); // comes first to be first in the list
repositoryIds.addAll(
dao.query( new UniqueFieldConstraint( RepositoryProblem.class.getName(), "repositoryId" ) ) );
+
+ availableRepositories = new ArrayList<String>();
+
+ // remove selected repositories in the option for the statistics report
+ availableRepositories.addAll( archivaConfiguration.getConfiguration().getManagedRepositoriesAsMap().keySet() );
+ for( String repo : selectedRepositories )
+ {
+ if( availableRepositories.contains( repo ) )
+ {
+ availableRepositories.remove( repo );
+ }
+ }
}
public Collection<String> getRepositoryIds()
return repositoryIds;
}
+ /**
+ * Generate the statistics report.
+ *
+ * check whether single repo report or comparison report
+ * 1. if it is a single repository, get all the statistics for the repository on the specified date
+ * - if no date is specified, get only the latest
+ * (total page = 1 --> no pagination since only the most recent stats will be displayed)
+ * - otherwise, get everything within the date range (total pages = repo stats / rows per page)
+ * - required params: repository, startDate, endDate
+ *
+ * 2. if multiple repositories, get the latest statistics on each repository on the specified date
+     *    - if no date is specified, use the current date as the endDate
+ * - required params: repositories, endDate
+ * - total pages = repositories / rows per page
+ *
+ * @return
+ */
+ public String generateStatistics()
+ {
+ if( rowCount < 10 )
+ {
+            addFieldError( "rowCount", "Row count must be at least 10." );
+ return INPUT;
+ }
+ reposSize = selectedRepositories.size();
+ Date startDateInDateFormat = null;
+ Date endDateInDateFormat = null;
+
+ if( startDate == null || "".equals( startDate ) )
+ {
+ startDateInDateFormat = getDefaultStartDate();
+ }
+ else
+ {
+ try
+ {
+ startDateInDateFormat = DateUtils.parseDate( startDate, datePatterns );
+ }
+ catch ( ParseException e )
+ {
+ addFieldError( "startDate", "Invalid date format.");
+ return INPUT;
+ }
+ }
+
+ if( endDate == null || "".equals( endDate ) )
+ {
+ endDateInDateFormat = getDefaultEndDate();
+ }
+ else
+ {
+ try
+ {
+ endDateInDateFormat = DateUtils.parseDate( endDate, datePatterns );
+ }
+ catch ( ParseException e )
+ {
+ addFieldError( "endDate", "Invalid date format.");
+ return INPUT;
+ }
+ }
+
+ try
+ {
+ RepositoryContentStatisticsDAO repoContentStatsDao = dao.getRepositoryContentStatisticsDAO();
+ if( selectedRepositories.size() > 1 )
+ {
+ limits.setTotalCount( selectedRepositories.size() );
+ limits.setCurrentPage( 1 );
+ limits.setPerPageCount( 1 );
+ limits.setCountOfPages( 1 );
+
+ // multiple repos
+ for( String repo : selectedRepositories )
+ {
+ try
+ {
+ List contentStats = repoContentStatsDao.queryRepositoryContentStatistics(
+ new RepositoryContentStatisticsByRepositoryConstraint( repo, startDateInDateFormat, endDateInDateFormat ) );
+
+ if( contentStats == null || contentStats.isEmpty() )
+ {
+ log.info( "No statistics available for repository '" + repo + "'." );
+ // TODO set repo's stats to 0
+
+ continue;
+ }
+ repositoryStatistics.addAll( generator.generateReport( contentStats, repo, startDateInDateFormat, endDateInDateFormat, limits ) );
+ }
+ catch ( ObjectNotFoundException oe )
+ {
+ log.error( "No statistics available for repository '" + repo + "'." );
+ // TODO set repo's stats to 0
+ }
+ catch ( ArchivaDatabaseException ae )
+ {
+ log.error( "Error encountered while querying statistics of repository '" + repo + "'." );
+ // TODO set repo's stats to 0
+ }
+ }
+ }
+ else if ( selectedRepositories.size() == 1 )
+ {
+ limits.setCurrentPage( getPage() );
+ limits.setPerPageCount( getRowCount() );
+
+ selectedRepo = selectedRepositories.get( 0 );
+ try
+ {
+ List<RepositoryContentStatistics> contentStats = repoContentStatsDao.queryRepositoryContentStatistics(
+ new RepositoryContentStatisticsByRepositoryConstraint( selectedRepo, startDateInDateFormat, endDateInDateFormat ) );
+
+ if( contentStats == null || contentStats.isEmpty() )
+ {
+ addActionError( "No statistics available for repository. Repository might not have been scanned." );
+ return ERROR;
+ }
+
+ limits.setTotalCount( contentStats.size() );
+ int extraPage = ( limits.getTotalCount() % limits.getPerPageCount() ) != 0 ? 1 : 0;
+ int totalPages = ( limits.getTotalCount() / limits.getPerPageCount() ) + extraPage;
+ limits.setCountOfPages( totalPages );
+
+ repositoryStatistics = generator.generateReport( contentStats, selectedRepo, startDateInDateFormat, endDateInDateFormat, limits );
+ }
+ catch ( ObjectNotFoundException oe )
+ {
+ addActionError( oe.getMessage() );
+ return ERROR;
+ }
+ catch ( ArchivaDatabaseException de )
+ {
+ addActionError( de.getMessage() );
+ return ERROR;
+ }
+ }
+ else
+ {
+ addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
+ return INPUT;
+ }
+
+ if( repositoryStatistics.isEmpty() )
+ {
+ return BLANK;
+ }
+
+            if( startDateInDateFormat.equals( getDefaultStartDate() ) )
+ {
+ startDate = null;
+ }
+ else
+ {
+ startDate = DateFormatUtils.format( startDateInDateFormat, "MM/dd/yyyy" );
+ }
+
+ endDate = DateFormatUtils.format( endDateInDateFormat, "MM/dd/yyyy" );
+ }
+ catch ( ArchivaReportException e )
+ {
+ addActionError( "Error encountered while generating report :: " + e.getMessage() );
+ return ERROR;
+ }
+
+ return SUCCESS;
+ }
+
+ private Date getDefaultStartDate()
+ {
+ Calendar cal = Calendar.getInstance();
+ cal.clear();
+        cal.set( 1900, Calendar.JANUARY, 1, 0, 0, 0 ); // Calendar months are 0-based
+
+ return cal.getTime();
+ }
+
+ private Date getDefaultEndDate()
+ {
+ return Calendar.getInstance().getTime();
+ }
+
public String execute()
throws Exception
- {
+ {
+ // Validate form input up front so we never query the DAO with a
+ // missing repository id or an out-of-range row count.
+ if( repositoryId == null )
+ {
+ addFieldError( "repositoryId", "You must provide a repository id.");
+ return INPUT;
+ }
+
+ if( rowCount < 10 )
+ {
+ // Message must match the guard: values of exactly 10 are accepted,
+ // so "at least 10", not "larger than 10".
+ addFieldError( "rowCount", "Row count must be at least 10." );
+ return INPUT;
+ }
+
List<RepositoryProblem> problemArtifacts =
dao.getRepositoryProblemDAO().queryRepositoryProblems( configureConstraint() );
return SUCCESS;
}
}
-
+
private static boolean isJasperPresent()
{
if ( jasperPresent == null )
return constraint;
}
+
+ /**
+ * Declares the security requirements for this action: the caller must be
+ * authenticated and must hold the report-access operation.
+ *
+ * @return the populated security bundle
+ * @throws SecureActionException on failure building the bundle
+ */
+ public SecureActionBundle getSecureActionBundle()
+ throws SecureActionException
+ {
+ SecureActionBundle bundle = new SecureActionBundle();
+
+ bundle.setRequiresAuthentication( true );
+ // Report access is authorized globally, not per repository.
+ bundle.addRequiredAuthorization( ArchivaRoleConstants.OPERATION_ACCESS_REPORT, Resource.GLOBAL );
+
+ return bundle;
+ }
+
+ /**
+ * Files the given problem report into the per-repository bucket inside
+ * {@code repositoriesMap}, creating the bucket on first use.
+ */
+ private void addToList( RepositoryProblemReport repoProblemReport )
+ {
+ String repoId = repoProblemReport.getRepositoryId();
+
+ List<RepositoryProblemReport> problemsList;
+ if ( !repositoriesMap.containsKey( repoId ) )
+ {
+ problemsList = new ArrayList<RepositoryProblemReport>();
+ repositoriesMap.put( repoId, problemsList );
+ }
+ else
+ {
+ problemsList = ( List<RepositoryProblemReport> ) repositoriesMap.get( repoId );
+ }
+
+ problemsList.add( repoProblemReport );
+ }
public void setServletRequest( HttpServletRequest request )
{
return repositoriesMap;
}
- public SecureActionBundle getSecureActionBundle()
- throws SecureActionException
+ public List<String> getSelectedRepositories()
{
- SecureActionBundle bundle = new SecureActionBundle();
+ return selectedRepositories;
+ }
- bundle.setRequiresAuthentication( true );
- bundle.addRequiredAuthorization( ArchivaRoleConstants.OPERATION_ACCESS_REPORT, Resource.GLOBAL );
+ public void setSelectedRepositories( List<String> selectedRepositories )
+ {
+ this.selectedRepositories = selectedRepositories;
+ }
- return bundle;
+ public List<String> getAvailableRepositories()
+ {
+ return availableRepositories;
+ }
+
+ public void setAvailableRepositories( List<String> availableRepositories )
+ {
+ this.availableRepositories = availableRepositories;
+ }
+
+ public String getStartDate()
+ {
+ return startDate;
+ }
+
+ public void setStartDate( String startDate )
+ {
+ this.startDate = startDate;
+ }
+
+ public String getEndDate()
+ {
+ return endDate;
+ }
+
+ public void setEndDate( String endDate )
+ {
+ this.endDate = endDate;
+ }
+
+ public List<RepositoryStatistics> getRepositoryStatistics()
+ {
+ return repositoryStatistics;
+ }
+
+ public void setRepositoryStatistics( List<RepositoryStatistics> repositoryStatistics )
+ {
+ this.repositoryStatistics = repositoryStatistics;
}
- private void addToList( RepositoryProblemReport repoProblemReport )
+ public int getReposSize()
+ {
+ return reposSize;
+ }
+
+ public void setReposSize( int reposSize )
+ {
+ this.reposSize = reposSize;
+ }
+
+ public String getSelectedRepo()
+ {
+ return selectedRepo;
+ }
+
+ public void setSelectedRepo( String selectedRepo )
+ {
+ this.selectedRepo = selectedRepo;
+ }
+
+ public DataLimits getLimits()
+ {
+ return limits;
+ }
+
+ public void setLimits( DataLimits limits )
{
- List<RepositoryProblemReport> problemsList = null;
-
- if ( repositoriesMap.containsKey( repoProblemReport.getRepositoryId() ) )
- {
- problemsList = ( List<RepositoryProblemReport> ) repositoriesMap.get( repoProblemReport.getRepositoryId() );
- }
- else
- {
- problemsList = new ArrayList<RepositoryProblemReport>();
- repositoriesMap.put( repoProblemReport.getRepositoryId(), problemsList );
- }
-
- problemsList.add( repoProblemReport );
+ this.limits = limits;
}
}
import org.apache.commons.codec.Decoder;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.security.AccessDeniedException;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.apache.maven.archiva.security.ArchivaSecurityException;
+import org.apache.maven.archiva.security.ArchivaXworkUser;
import org.apache.maven.archiva.security.PrincipalNotFoundException;
import org.apache.maven.archiva.security.ServletAuthenticator;
import org.apache.maven.archiva.security.UserRepositories;
private ServletAuthenticator servletAuth;
private HttpAuthenticator httpAuth;
+
+ private ArchivaXworkUser archivaXworkUser;
public void init( javax.servlet.ServletConfig servletConfig )
throws ServletException
(ServletAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( ServletAuthenticator.class.getName() ) );
httpAuth =
(HttpAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( HttpAuthenticator.ROLE, "basic" ) );
+ archivaXworkUser = (ArchivaXworkUser) wac.getBean( PlexusToSpringUtils.buildSpringId( ArchivaXworkUser.class ) );
}
public void doGet( HttpServletRequest req, HttpServletResponse res )
throws ServletException, IOException
{
- String repoId = req.getParameter( "repoId" );
- String groupId = req.getParameter( "groupId" );
- String artifactId = req.getParameter( "artifactId" );
+ String repoId = null;
+ String groupId = null;
+ String artifactId = null;
+
+ String url = StringUtils.removeEnd( req.getRequestURL().toString(), "/" );
+ if( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) > 0 )
+ {
+ artifactId = StringUtils.substringAfterLast( url, "/" );
+ groupId = StringUtils.substringBeforeLast( StringUtils.substringAfter( url, "feeds/" ), "/");
+ groupId = StringUtils.replaceChars( groupId, '/', '.' );
+ }
+ else if( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) == 0 )
+ {
+ repoId = StringUtils.substringAfterLast( url, "/" );
+ }
+ else
+ {
+ res.sendError( HttpServletResponse.SC_BAD_REQUEST, "Invalid request url." );
+ return;
+ }
try
{
Map<String, String> map = new HashMap<String, String>();
SyndFeed feed = null;
-
- if ( ( repoId == null ) && ( groupId == null && artifactId == null ) )
- {
- res.sendError( HttpServletResponse.SC_BAD_REQUEST, "Required fields not found in request." );
- return;
- }
-
- if ( isAllowed( req ) )
+
+ if ( isAllowed( req, repoId, groupId, artifactId ) )
{
if ( repoId != null )
{
}
res.setContentType( MIME_TYPE );
-
+
if ( repoId != null )
- {
- feed.setLink( req.getRequestURL() + "?repoId=" + repoId );
+ {
+ feed.setLink( req.getRequestURL().toString() );
}
else if ( ( groupId != null ) && ( artifactId != null ) )
{
- feed.setLink( req.getRequestURL() + "?groupId=" + groupId + "&artifactId=" + artifactId );
+ feed.setLink( req.getRequestURL().toString() );
}
SyndFeedOutput output = new SyndFeedOutput();
* Basic authentication.
*
* @param req
+ * @param repositoryId the repository the feed is scoped to, or null for an artifact-scoped feed
+ * @param groupId the artifact group id, or null for a repository-scoped feed
+ * @param artifactId the artifact id, or null for a repository-scoped feed
* @return
*/
- private boolean isAllowed( HttpServletRequest req )
+ private boolean isAllowed( HttpServletRequest req, String repositoryId, String groupId, String artifactId )
throws UserNotFoundException, AccountLockedException, AuthenticationException, MustChangePasswordException,
UnauthorizedException
{
String auth = req.getHeader( "Authorization" );
List<String> repoIds = new ArrayList<String>();
- if ( req.getParameter( "repoId" ) != null )
+ if ( repositoryId != null )
{
- repoIds.add( req.getParameter( "repoId" ) );
+ repoIds.add( repositoryId );
}
- else if ( req.getParameter( "artifactId" ) != null && req.getParameter( "groupId" ) != null )
+ else if ( artifactId != null && groupId != null )
{
if ( auth != null )
{
if ( usernamePassword == null || usernamePassword.trim().equals( "" ) )
{
- repoIds = getObservableRepos( ArchivaRoleConstants.PRINCIPAL_GUEST );
+ repoIds = getObservableRepos( archivaXworkUser.getGuest() );
}
else
{
}
else
{
- repoIds = getObservableRepos( ArchivaRoleConstants.PRINCIPAL_GUEST );
+ repoIds = getObservableRepos( archivaXworkUser.getGuest() );
}
}
else
StringBuffer baseUrl = new StringBuffer();
baseUrl.append( request.getScheme() ).append( "://" );
- baseUrl.append( request.getServerName() );
- int portnum = request.getServerPort();
-
- // Only add port if non-standard.
- Integer defaultPortnum = (Integer) defaultSchemePortMap.get( request.getScheme() );
- if ( ( defaultPortnum == null ) || ( defaultPortnum.intValue() != portnum ) )
- {
- baseUrl.append( ":" ).append( String.valueOf( portnum ) );
- }
+ baseUrl.append( getServerName( request ) );
baseUrl.append( request.getContextPath() );
if ( StringUtils.isNotBlank( resource ) )
return baseUrl.toString();
}
+
+ /**
+ * Resolves the externally visible host (and, if non-standard, port) for
+ * building absolute URLs, honoring a reverse proxy's X-Forwarded-Host
+ * header when present.
+ *
+ * @param request the current request
+ * @return host, or host:port when the port is non-standard for the scheme
+ */
+ private static String getServerName( HttpServletRequest request )
+ {
+ // The forwarded host may already carry a port; use it verbatim.
+ String name = request.getHeader( "X-Forwarded-Host" );
+ if ( name != null )
+ {
+ // Proxy chains may append hosts as a comma-separated list; the
+ // first entry is the one the client originally addressed.
+ return name.split( "," )[0].trim();
+ }
+
+ name = request.getServerName();
+ int portnum = request.getServerPort();
+
+ // Only add port if non-standard.
+ Integer defaultPortnum = (Integer) defaultSchemePortMap.get( request.getScheme() );
+ if ( ( defaultPortnum == null ) || ( defaultPortnum.intValue() != portnum ) )
+ {
+ name = name + ":" + String.valueOf( portnum );
+ }
+ return name;
+ }
}
<appender-ref ref="auditlog" />
</logger>
- <logger name="org.codehaus.plexus.security">
+ <logger name="org.codehaus.plexus.redback">
<level value="info"/>
<appender-ref ref="rolling" />
</logger>
<priority value ="info" />
</root>
-</log4j:configuration>
\ No newline at end of file
+</log4j:configuration>
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
+ "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
+
+<validators>
+ <field name="rowCount">
+ <field-validator type="int">
+ <message>Invalid entry</message>
+ </field-validator>
+ <field-validator type="int">
+ <param name="min">1</param>
+ <message>Row count must be larger than ${min}.</message>
+ </field-validator>
+ </field>
+</validators>
+++ /dev/null
-<?xml version="1.0" encoding="ISO-8859-1"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<!DOCTYPE validators PUBLIC "-//OpenSymphony Group//XWork Validator 1.0.2//EN"
- "http://www.opensymphony.com/xwork/xwork-validator-1.0.2.dtd">
-
-<validators>
- <field name="rowCount">
- <field-validator type="int">
- <param name="min">10</param>
- <message>Row count must be larger than ${min}.</message>
- </field-validator>
- </field>
- <!--
- <field name="groupId">
- <field-validator type="regex">
- <param name="expression"><![CDATA[([a-zA-Z0-9]+[a-zA-Z0-9.]*)]]></param>
- <message>You must provide a valid group id.</message>
- </field-validator>
- </field>
- -->
- <field name="repositoryId">
- <field-validator type="requiredstring">
- <message>You must provide a repository id.</message>
- </field-validator>
- </field>
-</validators>
<action name="index" class="searchAction" method="input">
<result name="input">/WEB-INF/jsp/quickSearch.jsp</result>
</action>
+
+ <action name="advancedSearch" class="searchAction" method="input">
+ <result name="input">/WEB-INF/jsp/advancedSearch.jsp</result>
+ </action>
+
+ <action name="filteredSearch" class="searchAction" method="filteredSearch">
+ <result name="input">/WEB-INF/jsp/advancedSearch.jsp</result>
+ <result>/WEB-INF/jsp/results.jsp</result>
+ <result name="error">/WEB-INF/jsp/quickSearch.jsp</result>
+ </action>
<action name="quickSearch" class="searchAction" method="quickSearch">
<result name="input">/WEB-INF/jsp/quickSearch.jsp</result>
<result name="blank">/WEB-INF/jsp/reports/blankReport.jsp</result>
<result>/WEB-INF/jsp/reports/basicReport.jsp</result>
</action>
+
+ <action name="generateStatisticsReport" class="generateReport" method="generateStatistics">
+ <result name="input">/WEB-INF/jsp/reports/pickReport.jsp</result>
+ <result name="blank">/WEB-INF/jsp/reports/blankReport.jsp</result>
+ <result>/WEB-INF/jsp/reports/statisticsReport.jsp</result>
+ </action>
+
</package>
</xwork>
<bean id="propertyPlaceholder" class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer">
<property name="location" value="classpath:application.properties" />
</bean>
-
-
- <bean name="testXmlRpcService" lazy-init="true" scope="singleton" class="org.apache.maven.archiva.web.xmlrpc.services.PingServiceImpl"/>
-
- <bean name="xmlrpcServicesList" class="java.util.ArrayList">
- <constructor-arg>
- <ref bean="testXmlRpcService"/>
- </constructor-arg>
- </bean>
-
- <bean name="xmlRpcAuthenticator" class="org.apache.maven.archiva.xmlrpc.security.XmlRpcAuthenticator">
- <constructor-arg>
- <ref bean="securitySystem"/>
- </constructor-arg>
- </bean>
-
</beans>
<h1>Admin: Add Managed Repository</h1>
<div id="contentArea">
-
+ <ww:actionerror/>
<ww:actionmessage/>
<ww:form method="post" action="addRepository!commit" namespace="/admin" validate="true">
<ww:textfield name="repository.id" label="Identifier" size="10" required="true"/>
<ww:actionerror/>
<ww:actionmessage/>
-<div style="float:right">
- <redback:ifAnyAuthorized permissions="archiva-manage-configuration">
- <ww:url id="addProxyConnectorUrl" action="addProxyConnector"/>
- <ww:a href="%{addProxyConnectorUrl}" cssClass="create">
- <img src="<c:url value="/images/icons/create.png" />"/>
- Add
- </ww:a>
- </redback:ifAnyAuthorized>
+<div style="float:right">
+ <c:choose>
+ <c:when test="${remoteRepoExists}">
+ <redback:ifAnyAuthorized permissions="archiva-manage-configuration">
+ <ww:url id="addProxyConnectorUrl" action="addProxyConnector"/>
+ <ww:a href="%{addProxyConnectorUrl}" cssClass="create">
+ <img src="<c:url value="/images/icons/create.png" />"/>
+ Add
+ </ww:a>
+ </redback:ifAnyAuthorized>
+ </c:when>
+ <c:otherwise>
+ <img src="<c:url value="/images/icons/create.png" />"/>
+ Add (Disabled. No remote repositories)
+ </c:otherwise>
+ </c:choose>
</div>
<h2>Repository Proxy Connectors</h2>
</ww:a>
</redback:ifAnyAuthorized>
<c:url var="rssFeedIconUrl" value="/images/icons/rss-feed.png"/>
- <a href="/archiva/rss/rss_feeds?repoId=${repository.id}">
+ <a href="/archiva/feeds/${repository.id}">
<img src="${rssFeedIconUrl}" />
</a>
</div>
</c:forEach>
</div> <%-- admin --%>
-</div> <%-- content area --%>
</c:otherwise>
</c:choose>
+</div> <%-- content area --%>
+
+</body>
+</html>
+
--- /dev/null
+<%--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ --%>
+
+<%@ taglib prefix="ww" uri="/webwork" %>
+<html>
+<head>
+ <title>Advanced Search</title>
+ <ww:head/>
+</head>
+
+<ww:if test="%{infoMessage != null}">
+ <p>${infoMessage}</p>
+</ww:if>
+
+<body>
+
+<h1>Advanced Search</h1>
+
+
+<div id="contentArea">
+
+ <div id="searchBox">
+
+ <ww:form method="get" action="filteredSearch" validate="true">
+ <ww:textfield label="Row Count" size="50" name="rowCount"/>
+ <ww:textfield label="Group Id" size="50" name="groupId"/>
+ <ww:textfield label="Artifact Id" size="50" name="artifactId"/>
+ <ww:textfield label="Version" size="50" name="version"/>
+ <ww:textfield label="Class / Package" size="50" name="className"/>
+ <ww:select name="repositoryId" label="Repository ID" list="managedRepositoryList"/>
+ <ww:hidden name="completeQueryString" value="${completeQueryString}"/>
+ <ww:hidden name="fromFilterSearch" value="${fromFilterSearch}"/>
+ <ww:submit label="Go!"/>
+ </ww:form>
+
+ <ww:url id="indexUrl" action="index"/>
+ <ww:a href="%{indexUrl}">
+ Quick Search Page
+ </ww:a>
+
+ </div>
+
+ <script type="text/javascript">
+ document.getElementById("filteredSearch_groupId").focus();
+ </script>
+ <ww:actionerror/>
+
+</div>
+
+</body>
+</html>
<ww:param name="groupId" value="%{'${results.selectedGroupId}'}"/>
<ww:param name="artifactId" value="%{'${artifactId}'}"/>
</ww:url>
- </c:set>
+ </c:set>
+ <c:url var="rssUrl" value="/feeds/${groupId}/${artifactId}"/>
<li>
<a href="${url}">${artifactId}/</a>
- <a href="/archiva/rss/rss_feeds?groupId=${groupId}&artifactId=${artifactId}">
- <img src="${rssFeedIconUrl}" />
- </a>
- </li>
+ <a href="${rssUrl}">
+ <img src="${rssFeedIconUrl}" />
+ </a>
+ </li>
</c:forEach>
</ul>
</div>
<my:currentWWUrl action="userlist" namespace="/security">User Management</my:currentWWUrl>
</li>
</redback:ifAuthorized>
+ <redback:ifAuthorized permission="archiva-manage-users">
+ <li class="none">
+ <my:currentWWUrl action="roles" namespace="/security">User Roles</my:currentWWUrl>
+ </li>
+ </redback:ifAuthorized>
<redback:ifAuthorized permission="archiva-manage-configuration">
<li class="none">
<my:currentWWUrl action="configureAppearance" namespace="/admin">Appearance</my:currentWWUrl>
</table>
<c:if test="${model.packaging != 'pom'}">
- <h2>POM Dependency Snippet</h2>
+ <h2>POM Snippet</h2>
+ <c:choose>
+ <c:when test="${model.packaging == 'maven-plugin'}">
+<pre class="pom">
+ <plugin>
+ <groupId>${model.groupId}</groupId>
+ <artifactId>${model.artifactId}</artifactId>
+ <version>${version}</version>
+ </plugin>
+</pre>
+ </c:when>
+ <c:otherwise>
<pre class="pom">
<dependency>
<groupId>${model.groupId}</groupId>
- <artifactId>${model.artifactId}</artifactId>
+ <artifactId>${model.artifactId}</artifactId>
<version>${version}</version><c:if test="${model.packaging != 'jar'}">
<type>${model.packaging}</type></c:if>
</dependency>
</pre>
+ </c:otherwise>
+ </c:choose>
</c:if>
<c:if test="${!empty (model.url) || model.organization != null || !empty (model.licenses)
+++ /dev/null
-<%--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- --%>
-
-<%@ taglib prefix="ww" uri="/webwork" %>
-
-<div id="searchBox">
- <ww:form method="get" action="quickSearch" validate="true">
- <ww:textfield label="Search for" size="50" name="q"/>
- <ww:checkbox label="Search Results Only" name="searchResultsOnly"/>
- <ww:hidden name="completeQueryString" value="${completeQueryString}"/>
- <ww:submit label="Go!"/>
- </ww:form>
-
- <script type="text/javascript">
- document.getElementById("quickSearch_q").focus();
- </script>
-
- <p>
- Enter your search terms. A variety of data will be searched for your keywords.<br/>
- To search for java classes, packages or methods, append the keyword "<i>bytecode:</i>"
- to your query. Example, bytecode:MyClass or bytecode:myMethod
- <ww:actionerror/>
- </p>
-</div>
\ No newline at end of file
<h1>Search</h1>
<div id="contentArea">
- <%@ include file="/WEB-INF/jsp/include/quickSearchForm.jspf" %>
+<div id="searchBox">
+ <ww:form method="get" action="quickSearch" validate="true">
+ <ww:textfield label="Search for" size="50" name="q"/>
+ <ww:hidden name="completeQueryString" value="${completeQueryString}"/>
+ <ww:submit label="Go!"/>
+ </ww:form>
+
+ <script type="text/javascript">
+ document.getElementById("quickSearch_q").focus();
+ </script>
+
+ <ww:url id="filteredSearchUrl" action="advancedSearch"/>
+ <ww:a href="%{filteredSearchUrl}">
+ Advanced Search
+ </ww:a>
+
+ <p>
+ <ww:actionerror/>
+ </p>
</div>
+ <p>
+ Enter your search terms. A variety of data will be searched for your keywords.<br/>
+ To search for Java classes, packages or methods, use the keyword <code>bytecode:</code>
+ before the term. For example:
+ <code>bytecode:MyClass</code>, or:
+ <code>bytecode:myMethod</code>
+ </p>
+</div>
</body>
-</html>
\ No newline at end of file
+</html>
<body>
<h1>Reports</h1>
-
+
<div id="contentArea">
- <ww:form action="generateReport" namespace="/report" validate="true">
+ <h2>Repository Statistics</h2>
+ <ww:form action="generateStatisticsReport" namespace="/report" validate="true">
+
+ <ww:optiontransferselect label="Repositories To Be Compared" name="availableRepositories"
+ list="availableRepositories" doubleName="selectedRepositories"
+ doubleList="selectedRepositories" size="8" doubleSize="8"/>
+
+ <ww:datepicker label="Start Date" name="startDate" id="startDate"/>
+ <ww:datepicker label="End Date" name="endDate" id="endDate"/>
+ <ww:textfield label="Row Count" name="rowCount" />
+
+ <ww:submit value="View Statistics"/>
+ </ww:form>
+
+ <h2>Repository Health</h2>
+ <ww:form namespace="/report" action="generateReport" validate="true">
<ww:textfield label="Row Count" name="rowCount" />
<ww:textfield label="Group ID" name="groupId"/>
- <ww:select label="Repository ID" name="repositoryId" list="repositoryIds"/>
+ <ww:select label="Repository ID" name="repositoryId" list="repositoryIds"/>
+
<ww:submit value="Show Report"/>
</ww:form>
--- /dev/null
+<%--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ --%>
+
+<%@ taglib prefix="ww" uri="/webwork" %>
+<%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %>
+<%@ taglib prefix="archiva" uri="http://archiva.apache.org" %>
+
+<html>
+<head>
+ <title>Reports</title>
+ <ww:head/>
+</head>
+
+<body>
+<h1>Statistics Report</h1>
+
+<c:url var="imgNextPageUrl" value="/images/icon_next_page.gif"/>
+<c:url var="imgPrevPageUrl" value="/images/icon_prev_page.gif"/>
+<c:url var="imgPrevPageDisabledUrl" value="/images/icon_prev_page_disabled.gif"/>
+<c:url var="imgNextPageDisabledUrl" value="/images/icon_next_page_disabled.gif"/>
+
+<div id="contentArea">
+
+ <%-- TODO: fix problem in date format! --%>
+
+ <%-- Pagination - start --%>
+ <p>
+
+ <%-- Set Prev & Next icons --%>
+ <c:set var="prevPageUrl">
+ <ww:url action="generateStatisticsReport" namespace="/report">
+ <ww:param name="selectedRepositories" value="%{'${selectedRepositories}'}"/>
+ <ww:param name="rowCount" value="%{'${rowCount}'}"/>
+ <ww:param name="startDate" value="%{'${startDate}'}"/>
+ <ww:param name="endDate" value="%{'${endDate}'}"/>
+ <ww:param name="page" value="%{'${page - 1}'}"/>
+ </ww:url>
+ </c:set>
+ <c:set var="nextPageUrl">
+ <ww:url action="generateStatisticsReport" namespace="/report">
+ <ww:param name="selectedRepositories" value="%{'${selectedRepositories}'}"/>
+ <ww:param name="rowCount" value="%{'${rowCount}'}"/>
+ <ww:param name="startDate" value="%{'${startDate}'}"/>
+ <ww:param name="endDate" value="%{'${endDate}'}"/>
+ <ww:param name="page" value="%{'${page + 1}'}"/>
+ </ww:url>
+ </c:set>
+
+ <c:choose>
+ <c:when test="${page == 1}">
+ <img src="${imgPrevPageDisabledUrl}"/>
+ </c:when>
+ <c:otherwise>
+ <a href="${prevPageUrl}">
+ <img src="${imgPrevPageUrl}"/>
+ </a>
+ </c:otherwise>
+ </c:choose>
+
+ <%-- Google-style pagination --%>
+ <c:choose>
+ <c:when test="${limits.countOfPages > 11}">
+ <c:choose>
+ <c:when test="${(page - 5) < 0}">
+ <c:set var="beginVal">0</c:set>
+ <c:set var="endVal">10</c:set>
+ </c:when>
+ <c:when test="${(page + 5) > (limits.countOfPages - 1)}">
+ <c:set var="beginVal">${(limits.countOfPages -1) - 10}</c:set>
+ <c:set var="endVal">${limits.countOfPages - 1}</c:set>
+ </c:when>
+ <c:otherwise>
+ <c:set var="beginVal">${page - 5}</c:set>
+ <c:set var="endVal">${page + 5}</c:set>
+ </c:otherwise>
+ </c:choose>
+ </c:when>
+ <c:otherwise>
+ <c:set var="beginVal">0</c:set>
+ <c:set var="endVal">${limits.countOfPages - 1}</c:set>
+ </c:otherwise>
+ </c:choose>
+
+ <c:forEach var="i" begin="${beginVal}" end="${endVal}">
+ <c:choose>
+ <c:when test="${i != (page - 1)}">
+ <c:set var="specificPageUrl">
+ <ww:url action="generateStatisticsReport" namespace="/report">
+ <ww:param name="selectedRepositories" value="%{'${selectedRepositories}'}"/>
+ <ww:param name="rowCount" value="%{'${rowCount}'}"/>
+ <ww:param name="startDate" value="%{'${startDate}'}"/>
+ <ww:param name="endDate" value="%{'${endDate}'}"/>
+ <%-- i is zero-based while the page parameter is one-based; each
+ numbered link must target its own page, not the next page. --%>
+ <ww:param name="page" value="%{'${i + 1}'}"/>
+ </ww:url>
+ </c:set>
+ <a href="${specificPageUrl}">${i + 1}</a>
+ </c:when>
+ <c:otherwise>
+ <b>${i + 1}</b>
+ </c:otherwise>
+ </c:choose>
+ </c:forEach>
+
+ <c:choose>
+ <c:when test="${page == limits.countOfPages}">
+ <img src="${imgNextPageDisabledUrl}"/>
+ </c:when>
+ <c:otherwise>
+ <a href="${nextPageUrl}">
+ <img src="${imgNextPageUrl}"/>
+ </a>
+ </c:otherwise>
+ </c:choose>
+ </p>
+ <%-- Pagination - end --%>
+
+ <c:choose>
+ <c:when test="${reposSize > 1}">
+
+ <h1>Latest Statistics Comparison Report</h1>
+ <table class="infoTable" border="1">
+ <tr>
+ <th>Repository</th>
+ <th>Total File Count</th>
+ <th>Total Size</th>
+ <th>Artifact Count</th>
+ <th>Group Count</th>
+ <th>Project Count</th>
+ <th>Plugins</th>
+ <th>Archetypes</th>
+ <th>Jars</th>
+ <th>Wars</th>
+ <th>Deployments</th>
+ <th>Downloads</th>
+ </tr>
+
+ <c:forEach var="stats" items="${repositoryStatistics}">
+ <tr>
+ <td>${stats.repositoryId}</td>
+ <td align="right">${stats.fileCount}</td>
+ <td align="right">${stats.totalSize}</td>
+ <td align="right">${stats.artifactCount}</td>
+ <td align="right">${stats.groupCount}</td>
+ <td align="right">${stats.projectCount}</td>
+ <td align="right">${stats.pluginCount}</td>
+ <td align="right">${stats.archetypeCount}</td>
+ <td align="right">${stats.jarCount}</td>
+ <td align="right">${stats.warCount}</td>
+ <td align="right">${stats.deploymentCount}</td>
+ <td align="right">${stats.downloadCount}</td>
+ </tr>
+ </c:forEach>
+ </table>
+ </c:when>
+ <c:otherwise>
+
+ <h1>Statistics for Repository '${selectedRepo}'</h1>
+ <table class="infoTable" border="1">
+ <tr>
+ <th>Date of Scan</th>
+ <th>Total File Count</th>
+ <th>Total Size</th>
+ <th>Artifact Count</th>
+ <th>Group Count</th>
+ <th>Project Count</th>
+ <th>Plugins</th>
+ <th>Archetypes</th>
+ <th>Jars</th>
+ <th>Wars</th>
+ <th>Deployments</th>
+ <th>Downloads</th>
+ </tr>
+
+ <c:forEach var="stats" items="${repositoryStatistics}">
+ <tr>
+ <td align="right">${stats.dateOfScan}</td>
+ <td align="right">${stats.fileCount}</td>
+ <td align="right">${stats.totalSize}</td>
+ <td align="right">${stats.artifactCount}</td>
+ <td align="right">${stats.groupCount}</td>
+ <td align="right">${stats.projectCount}</td>
+ <td align="right">${stats.pluginCount}</td>
+ <td align="right">${stats.archetypeCount}</td>
+ <td align="right">${stats.jarCount}</td>
+ <td align="right">${stats.warCount}</td>
+ <td align="right">${stats.deploymentCount}</td>
+ <td align="right">${stats.downloadCount}</td>
+ </tr>
+ </c:forEach>
+ </table>
+
+ </c:otherwise>
+ </c:choose>
+
+</div>
+</body>
+</html>
<body>
-<h1>Search</h1>
+<c:if test="${fromFilterSearch == true}">
+ <h1>Advanced Search</h1>
+</c:if>
+<c:if test="${fromFilterSearch == false}">
+ <h1>Search</h1>
+</c:if>
<c:url var="imgNextPageUrl" value="/images/icon_next_page.gif"/>
<c:url var="imgPrevPageUrl" value="/images/icon_prev_page.gif"/>
<div id="contentArea">
<div id="searchBox">
- <%@ include file="/WEB-INF/jsp/include/quickSearchForm.jspf" %>
+
+ <c:if test="${fromFilterSearch == true}">
+ <ww:form method="get" action="filteredSearch" validate="true">
+ <ww:textfield label="Row Count" size="50" name="rowCount"/>
+ <ww:textfield label="Group Id" size="50" name="groupId"/>
+ <ww:textfield label="Artifact Id" size="50" name="artifactId"/>
+ <ww:textfield label="Version" size="50" name="version"/>
+ <ww:textfield label="Class / Package" size="50" name="className"/>
+ <ww:select name="repositoryId" label="Repository ID" list="managedRepositoryList"/>
+ <ww:hidden name="completeQueryString" value="${completeQueryString}"/>
+ <ww:hidden name="fromFilterSearch" value="${fromFilterSearch}"/>
+ <ww:submit label="Go!"/>
+ </ww:form>
+
+ <ww:url id="indexUrl" action="index"/>
+ <ww:a href="%{indexUrl}">
+ Quick Search Page
+ </ww:a>
+ <script type="text/javascript">
+ document.getElementById("filteredSearch_groupId").focus();
+ </script>
+ </c:if>
+ <c:if test="${fromFilterSearch == false}">
+ <ww:form method="get" action="quickSearch" validate="true">
+ <ww:textfield label="Search for" size="50" name="q"/>
+ <ww:checkbox label="Search within results" name="searchResultsOnly"/>
+ <ww:hidden name="completeQueryString" value="${completeQueryString}"/>
+ <ww:submit label="Go!"/>
+ </ww:form>
+ <script type="text/javascript">
+ document.getElementById("quickSearch_q").focus();
+ </script>
+ </c:if>
+
+ <p>
+ <ww:actionerror/>
+ </p>
+
</div>
<h1>Results</h1>
<%-- search was made from the indices --%>
<c:when test="${databaseResults == null}">
- <c:set var="hitsNum">${fn:length(results.hits) + (currentPage * results.limits.pageSize)}</c:set>
+ <c:set var="hitsNum">${fn:length(results.hits) + (currentPage * results.limits.pageSize)}</c:set>
<c:choose>
<c:when test="${results.totalHits > results.limits.pageSize}">
- <p>Hits: ${(hitsNum - results.limits.pageSize) + 1} to ${hitsNum} of ${results.totalHits}</p>
+ <p>Hits: ${(hitsNum - results.limits.pageSize) + 1} to ${hitsNum} of ${results.totalHits}</p>
</c:when>
<c:otherwise>
<p>Hits: 1 to ${hitsNum} of ${results.totalHits}</p>
<%-- Pagination start --%>
<p>
<%-- Prev & Next icons --%>
- <c:set var="prevPageUrl">
- <ww:url action="quickSearch" namespace="/">
- <ww:param name="q" value="%{'${q}'}"/>
- <ww:param name="currentPage" value="%{'${currentPage - 1}'}"/>
- </ww:url>
- </c:set>
- <c:set var="nextPageUrl">
- <ww:url action="quickSearch" namespace="/">
- <ww:param name="q" value="%{'${q}'}"/>
- <ww:param name="currentPage" value="%{'${currentPage + 1}'}"/>
- </ww:url>
- </c:set>
+ <c:if test="${fromFilterSearch == false}">
+ <c:set var="prevPageUrl">
+ <ww:url action="quickSearch" namespace="/">
+ <ww:param name="q" value="%{'${q}'}"/>
+ <ww:param name="currentPage" value="%{'${currentPage - 1}'}"/>
+ </ww:url>
+ </c:set>
+ <c:set var="nextPageUrl">
+ <ww:url action="quickSearch" namespace="/">
+ <ww:param name="q" value="%{'${q}'}"/>
+ <ww:param name="currentPage" value="%{'${currentPage + 1}'}"/>
+ </ww:url>
+ </c:set>
+ </c:if>
+
+ <c:if test="${fromFilterSearch == true}">
+ <c:set var="prevPageUrl">
+ <ww:url action="filteredSearch" namespace="/">
+ <ww:param name="rowCount" value="%{'${rowCount}'}"/>
+ <ww:param name="groupId" value="%{'${groupId}'}"/>
+ <ww:param name="artifactId" value="%{'${artifactId}'}"/>
+ <ww:param name="version" value="%{'${version}'}"/>
+ <ww:param name="className" value="%{'${className}'}"/>
+ <ww:param name="repositoryId" value="%{'${repositoryId}'}"/>
+ <ww:param name="filterSearch" value="%{'${filterSearch}'}"/>
+ <ww:param name="fromResultsPage" value="true"/>
+ <ww:param name="currentPage" value="%{'${currentPage - 1}'}"/>
+ <ww:param name="searchResultsOnly" value="%{'${searchResultsOnly}'}"/>
+ <ww:param name="completeQueryString" value="%{'${completeQueryString}'}"/>
+ </ww:url>
+ </c:set>
+ <c:set var="nextPageUrl">
+ <ww:url action="filteredSearch" namespace="/">
+ <ww:param name="rowCount" value="%{'${rowCount}'}"/>
+ <ww:param name="groupId" value="%{'${groupId}'}"/>
+ <ww:param name="artifactId" value="%{'${artifactId}'}"/>
+ <ww:param name="version" value="%{'${version}'}"/>
+ <ww:param name="className" value="%{'${className}'}"/>
+ <ww:param name="repositoryId" value="%{'${repositoryId}'}"/>
+ <ww:param name="filterSearch" value="%{'${filterSearch}'}"/>
+ <ww:param name="fromResultsPage" value="true"/>
+ <ww:param name="currentPage" value="%{'${currentPage + 1}'}"/>
+ <ww:param name="searchResultsOnly" value="%{'${searchResultsOnly}'}"/>
+ <ww:param name="completeQueryString" value="%{'${completeQueryString}'}"/>
+ </ww:url>
+ </c:set>
+ </c:if>
<c:choose>
<c:when test="${currentPage == 0}">
</c:choose>
<c:forEach var="i" begin="${beginVal}" end="${endVal}">
- <c:choose>
- <c:when test="${i != currentPage}">
- <c:set var="specificPageUrl">
- <ww:url action="quickSearch" namespace="/">
- <ww:param name="q" value="%{'${q}'}"/>
- <ww:param name="currentPage" value="%{'${i}'}"/>
- <ww:param name="searchResultsOnly" value="%{'${searchResultsOnly}'}"/>
- <ww:param name="completeQueryString" value="%{'${completeQueryString}'}"/>
- </ww:url>
- </c:set>
- <a href="${specificPageUrl}">${i + 1}</a>
- </c:when>
- <c:otherwise>
- <b>${i + 1}</b>
- </c:otherwise>
- </c:choose>
+ <c:if test="${fromFilterSearch == false}">
+ <c:choose>
+ <c:when test="${i != currentPage}">
+ <c:set var="specificPageUrl">
+ <ww:url action="quickSearch" namespace="/">
+ <ww:param name="q" value="%{'${q}'}"/>
+ <ww:param name="currentPage" value="%{'${i}'}"/>
+ <ww:param name="searchResultsOnly" value="%{'${searchResultsOnly}'}"/>
+ <ww:param name="completeQueryString" value="%{'${completeQueryString}'}"/>
+ </ww:url>
+ </c:set>
+ <a href="${specificPageUrl}">${i + 1}</a>
+ </c:when>
+ <c:otherwise>
+ <b>${i + 1}</b>
+ </c:otherwise>
+ </c:choose>
+ </c:if>
+
+ <c:if test="${fromFilterSearch == true}">
+ <c:choose>
+ <c:when test="${i != currentPage}">
+ <c:set var="specificPageUrl">
+ <ww:url action="filteredSearch" namespace="/">
+ <ww:param name="rowCount" value="%{'${rowCount}'}"/>
+ <ww:param name="groupId" value="%{'${groupId}'}"/>
+ <ww:param name="artifactId" value="%{'${artifactId}'}"/>
+ <ww:param name="version" value="%{'${version}'}"/>
+ <ww:param name="className" value="%{'${className}'}"/>
+ <ww:param name="repositoryId" value="%{'${repositoryId}'}"/>
+ <ww:param name="filterSearch" value="%{'${filterSearch}'}"/>
+ <ww:param name="fromResultsPage" value="true"/>
+ <ww:param name="currentPage" value="%{'${i}'}"/>
+ <ww:param name="searchResultsOnly" value="%{'${searchResultsOnly}'}"/>
+ <ww:param name="completeQueryString" value="%{'${completeQueryString}'}"/>
+ </ww:url>
+ </c:set>
+ <a href="${specificPageUrl}">${i + 1}</a>
+ </c:when>
+ <c:otherwise>
+ <b>${i + 1}</b>
+ </c:otherwise>
+ </c:choose>
+ </c:if>
</c:forEach>
-
- <c:choose>
+
+ <c:choose>
<c:when test="${currentPage == (totalPages - 1)}">
<img src="${imgNextPageDisabledUrl}"/>
</c:when>
-<?xml version="1.0" encoding="UTF-8"?>
+<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ specific language governing permissions and limitations
~ under the License.
-->
-<web-app version="2.4" xmlns="http://java.sun.com/xml/ns/j2ee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd">
+
+<web-app xmlns="http://java.sun.com/xml/ns/j2ee" version="2.4"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd">
<display-name>Apache Archiva</display-name>
<!-- Loading this on startup so as to take advantage of configuration listeners -->
<load-on-startup>1</load-on-startup>
</servlet>
-
- <servlet>
- <servlet-name>XmlRpcServlet</servlet-name>
- <servlet-class>com.atlassian.xmlrpc.spring.BinderSpringXmlRpcServlet</servlet-class>
- <init-param>
- <param-name>serviceListBeanName</param-name>
- <param-value>xmlrpcServicesList</param-value>
- </init-param>
- <init-param>
- <param-name>authHandlerBeanName</param-name>
- <param-value>xmlRpcAuthenticator</param-value>
- </init-param>
- <load-on-startup>1</load-on-startup>
- </servlet>
<servlet>
<servlet-name>RssFeedServlet</servlet-name>
<servlet-mapping>
<servlet-name>RssFeedServlet</servlet-name>
- <url-pattern>/rss/rss_feeds</url-pattern>
+ <url-pattern>/feeds/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<url-pattern>/repository/*</url-pattern>
</servlet-mapping>
- <servlet-mapping>
- <servlet-name>XmlRpcServlet</servlet-name>
- <url-pattern>/xmlrpc</url-pattern>
- </servlet-mapping>
-
<resource-ref>
<res-ref-name>jdbc/users</res-ref-name>
<res-type>javax.sql.DataSource</res-type>
<res-auth>Container</res-auth>
<res-sharing-scope>Shareable</res-sharing-scope>
</resource-ref>
+
</web-app>
select {
padding-left: 3px;
- height: 1.4em;
+ height: auto;
+ width: auto;
}
input {
throws Exception
{
RssFeedServlet servlet =
- (RssFeedServlet) client.newInvocation( "http://localhost/rss/rss_feeds?repoId=test-repo" ).getServlet();
+ (RssFeedServlet) client.newInvocation( "http://localhost/feeds/test-repo" ).getServlet();
assertNotNull( servlet );
}
throws Exception
{
RssFeedServlet servlet =
- (RssFeedServlet) client.newInvocation( "http://localhost/rss/rss_feeds?repoId=test-repo" ).getServlet();
+ (RssFeedServlet) client.newInvocation( "http://localhost/feeds/test-repo" ).getServlet();
assertNotNull( servlet );
- WebRequest request = new GetMethodWebRequest( "http://localhost/rss/rss_feeds?repoId=test-repo" );
+ WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/test-repo" );
BASE64Encoder encoder = new BASE64Encoder();
String userPass = "user1:password1";
{
RssFeedServlet servlet =
(RssFeedServlet) client.newInvocation(
- "http://localhost/rss/rss_feeds?groupId=org.apache.archiva&artifactId=artifact-two" ).getServlet();
+ "http://localhost/feeds/org/apache/archiva/artifact-two" ).getServlet();
assertNotNull( servlet );
- WebRequest request = new GetMethodWebRequest( "http://localhost/rss/rss_feeds?groupId=org.apache.archiva&artifactId=artifact-two" );
+ WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/org/apache/archiva/artifact-two" );
BASE64Encoder encoder = new BASE64Encoder();
String userPass = "user1:password1";
{
RssFeedServlet servlet =
(RssFeedServlet) client.newInvocation(
- "http://localhost/rss/rss_feeds?invalid_param=xxx" ).getServlet();
+ "http://localhost/feeds?invalid_param=xxx" ).getServlet();
assertNotNull( servlet );
try
{
- WebResponse response = client.getResponse( "http://localhost/rss/rss_feeds?invalid_param=xxx" );
+ WebResponse response = client.getResponse( "http://localhost/feeds?invalid_param=xxx" );
}
catch ( HttpException he )
{
{
RssFeedServlet servlet =
(RssFeedServlet) client.newInvocation(
- "http://localhost/rss/rss_feeds?repoId=unauthorized-repo" ).getServlet();
+ "http://localhost/feeds/unauthorized-repo" ).getServlet();
assertNotNull( servlet );
- WebRequest request = new GetMethodWebRequest( "http://localhost/rss/rss_feeds?repoId=unauthorized-repo" );
+ WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/unauthorized-repo" );
Encoder encoder = new Base64();
String userPass = "unauthUser:unauthPass";
{
RssFeedServlet servlet =
(RssFeedServlet) client.newInvocation(
- "http://localhost/rss/rss_feeds?repoId=unauthorized-repo" ).getServlet();
+ "http://localhost/feeds/unauthorized-repo" ).getServlet();
assertNotNull( servlet );
- WebRequest request = new GetMethodWebRequest( "http://localhost/rss/rss_feeds?repoId=unauthorized-repo" );
+ WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/unauthorized-repo" );
BASE64Encoder encoder = new BASE64Encoder();
String userPass = "user1:password1";
<servlet-mapping>
<servlet-name>RssFeedServlet</servlet-name>
- <url-pattern>/rss/*</url-pattern>
+ <url-pattern>/feeds/*</url-pattern>
</servlet-mapping>
<context-param>
<artifactId>maven-model</artifactId>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-xwork-integration</artifactId>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-keys-memory</artifactId>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-rbac-memory</artifactId>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-users-memory</artifactId>
<scope>test</scope>
</dependency>
private final MimeTypes mimeTypes;
private List<AuditListener> auditListeners;
+
+ private ArchivaXworkUser archivaXworkUser;
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
MimeTypes mimeTypes, List<AuditListener> auditListeners,
- RepositoryContentConsumers consumers )
+ RepositoryContentConsumers consumers, ArchivaXworkUser archivaXworkUser )
{
this.localResource = new File( localResource );
this.logicalResource = logicalResource;
this.locator = locator;
this.factory = factory;
this.session = session;
+ this.archivaXworkUser = archivaXworkUser;
// TODO: push into locator as well as moving any references out of the resource factory
this.repository = repository;
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
String remoteAddr, DavSession session, ArchivaDavResourceLocator locator,
DavResourceFactory factory, MimeTypes mimeTypes, List<AuditListener> auditListeners,
- RepositoryContentConsumers consumers )
+ RepositoryContentConsumers consumers, ArchivaXworkUser archivaXworkUser )
{
this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
- consumers );
+ consumers, archivaXworkUser );
this.remoteAddr = remoteAddr;
}
private void triggerAuditEvent( String remoteIP, String repositoryId, String resource, String action )
{
- String activePrincipal = ArchivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
+ String activePrincipal = archivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
AuditEvent event = new AuditEvent( repositoryId, activePrincipal, resource, action );
event.setRemoteIP( remoteIP );
* @plexus.requirement role-hint="md5";
*/
private Digester digestMd5;
+
+ /**
+ * @plexus.requirement
+ */
+ private ArchivaXworkUser archivaXworkUser;
public DavResource createResource( final DavResourceLocator locator, final DavServletRequest request,
final DavServletResponse response )
{
checkLocatorIsInstanceOfRepositoryLocator( locator );
ArchivaDavResourceLocator archivaLocator = (ArchivaDavResourceLocator) locator;
-
+
RepositoryGroupConfiguration repoGroupConfig =
archivaConfiguration.getConfiguration().getRepositoryGroupsAsMap().get( archivaLocator.getRepositoryId() );
List<String> repositories = new ArrayList<String>();
boolean isGet = WebdavMethodUtil.isReadMethod( request.getMethod() );
boolean isPut = WebdavMethodUtil.isWriteMethod( request.getMethod() );
-
+
if ( repoGroupConfig != null )
{
if( WebdavMethodUtil.isWriteMethod( request.getMethod() ) )
try
{
- managedRepository = getManagedRepository( repositoryId );
+ managedRepository = getManagedRepository( repositoryId );
}
catch ( DavException de )
{
DavResource resource = null;
if ( !locator.getResourcePath().startsWith( ArchivaDavResource.HIDDEN_PATH_PREFIX ) )
- {
+ {
if ( managedRepository != null )
{
try
{
if( isAuthorized( request, repositoryId ) )
- {
+ {
LogicalResource logicalResource =
new LogicalResource( RepositoryPathUtil.getLogicalResource( locator.getResourcePath() ) );
if ( isPut )
{
- resource = doPut( managedRepository, request, archivaLocator, logicalResource );
+ resource = doPut( managedRepository, request, archivaLocator, logicalResource );
}
}
}
catch ( DavException de )
- {
+ {
e = de;
continue;
}
e = new DavException( HttpServletResponse.SC_NOT_FOUND, "Resource does not exist" );
}
else
- {
+ {
availableResources.add( resource );
String logicalResource = RepositoryPathUtil.getLogicalResource( locator.getResourcePath() );
- resourcesInAbsolutePath.add( managedRepository.getRepoRoot() + logicalResource );
+ resourcesInAbsolutePath.add( managedRepository.getRepoRoot() + logicalResource );
}
}
else
ArchivaDavResource metadataChecksumResource =
new ArchivaDavResource( metadataChecksum.getAbsolutePath(), logicalResource.getPath(), null,
request.getRemoteAddr(), request.getDavSession(), archivaLocator, this,
- mimeTypes, auditListeners, consumers );
+ mimeTypes, auditListeners, consumers, archivaXworkUser );
availableResources.add( 0, metadataChecksumResource );
}
}
ArchivaDavResource metadataResource =
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(), null,
request.getRemoteAddr(), request.getDavSession(), archivaLocator, this,
- mimeTypes, auditListeners, consumers );
+ mimeTypes, auditListeners, consumers, archivaXworkUser );
availableResources.add( 0, metadataResource );
}
catch ( RepositoryMetadataException r )
resource =
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource,
managedRepository.getRepository(), davSession, archivaLocator, this, mimeTypes,
- auditListeners, consumers );
+ auditListeners, consumers, archivaXworkUser );
}
resource.addLockManager(lockManager);
return resource;
ArchivaDavResource resource =
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
managedRepository.getRepository(), request.getRemoteAddr(),
- request.getDavSession(), locator, this, mimeTypes, auditListeners, consumers );
+ request.getDavSession(), locator, this, mimeTypes, auditListeners, consumers, archivaXworkUser );
if ( !resource.isCollection() )
{
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
managedRepository.getRepository(), request.getRemoteAddr(),
request.getDavSession(), locator, this, mimeTypes, auditListeners,
- consumers );
+ consumers, archivaXworkUser );
}
}
return resource;
File rootDirectory = new File( managedRepository.getRepoRoot() );
File destDir = new File( rootDirectory, logicalResource.getPath() ).getParentFile();
+
if ( request.getMethod().equals(HTTP_PUT_METHOD) && !destDir.exists() )
{
destDir.mkdirs();
String relPath = PathUtil.getRelative( rootDirectory.getAbsolutePath(), destDir );
triggerAuditEvent( request.getRemoteAddr(), logicalResource.getPath(), relPath, AuditEvent.CREATE_DIR );
}
-
- File resourceFile = new File( managedRepository.getRepoRoot(), logicalResource.getPath() );
-
+
+ File resourceFile = new File( managedRepository.getRepoRoot(), logicalResource.getPath() );
+
return new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(),
managedRepository.getRepository(), request.getRemoteAddr(),
- request.getDavSession(), locator, this, mimeTypes, auditListeners, consumers );
+ request.getDavSession(), locator, this, mimeTypes, auditListeners, consumers, archivaXworkUser );
}
private boolean fetchContentFromProxies( ManagedRepositoryContent managedRepository, DavServletRequest request,
// TODO: remove?
private void triggerAuditEvent( String remoteIP, String repositoryId, String resource, String action )
{
- String activePrincipal = ArchivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
+ String activePrincipal = archivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
AuditEvent event = new AuditEvent( repositoryId, activePrincipal, resource, action );
event.setRemoteIP( remoteIP );
protected boolean isAuthorized( DavServletRequest request, String repositoryId )
throws DavException
- {
+ {
try
- {
+ {
AuthenticationResult result = httpAuth.getAuthenticationResult( request, null );
SecuritySession securitySession = httpAuth.getSecuritySession();
WebdavMethodUtil.isWriteMethod( request.getMethod() ) );
}
catch ( AuthenticationException e )
- {
+ {
+ boolean isPut = WebdavMethodUtil.isWriteMethod( request.getMethod() );
+
+ // safety check for MRM-911
+ String guest = archivaXworkUser.getGuest();
+ try
+ {
+ if( servletAuth.isAuthorized( guest,
+ ( ( ArchivaDavResourceLocator ) request.getRequestLocator() ).getRepositoryId(), isPut ) )
+ {
+ return true;
+ }
+ }
+ catch ( UnauthorizedException ae )
+ {
+ throw new UnauthorizedDavException( repositoryId,
+ "You are not authenticated and authorized to access any repository." );
+ }
+
throw new UnauthorizedDavException( repositoryId, "You are not authenticated" );
}
catch ( MustChangePasswordException e )
request.getSession().getAttribute( SecuritySystemConstants.SECURITY_SESSION_KEY ) );
}
- String activePrincipal = ArchivaXworkUser.getActivePrincipal( sessionMap );
+ String activePrincipal = archivaXworkUser.getActivePrincipal( sessionMap );
boolean allow = isAllowedToContinue( request, repositories, activePrincipal );
if( allow )
{
+ boolean isPut = WebdavMethodUtil.isWriteMethod( request.getMethod() );
+
for( String repository : repositories )
{
// for prompted authentication
// for the current user logged in
try
{
- if( servletAuth.isAuthorizedToAccessVirtualRepository( activePrincipal, repository ) )
+ if( servletAuth.isAuthorized( activePrincipal, repository, isPut ) )
{
getResource( locator, mergedRepositoryContents, logicalResource, repository );
}
}
else
{
+ boolean isPut = WebdavMethodUtil.isWriteMethod( request.getMethod() );
for( String repository : repositories )
{
try
- {
- if( servletAuth.isAuthorizedToAccessVirtualRepository( activePrincipal, repository ) )
+ {
+ if( servletAuth.isAuthorized( activePrincipal, repository, isPut ) )
{
allow = true;
break;
return true;
}
}
+
+ public void setServletAuth( ServletAuthenticator servletAuth )
+ {
+ this.servletAuth = servletAuth;
+ }
+
+ public void setHttpAuth( HttpAuthenticator httpAuth )
+ {
+ this.httpAuth = httpAuth;
+ }
}
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.DavServletRequest;
import org.apache.maven.archiva.webdav.util.RepositoryPathUtil;
+import org.apache.maven.archiva.webdav.util.WebdavMethodUtil;
+import org.apache.maven.archiva.security.ArchivaXworkUser;
import org.apache.maven.archiva.security.ServletAuthenticator;
import org.codehaus.plexus.redback.authentication.AuthenticationException;
import org.codehaus.plexus.redback.authentication.AuthenticationResult;
+import org.codehaus.plexus.redback.authorization.UnauthorizedException;
import org.codehaus.plexus.redback.policy.MustChangePasswordException;
import org.codehaus.plexus.redback.policy.AccountLockedException;
import org.codehaus.plexus.redback.xwork.filter.authentication.HttpAuthenticator;
private HttpAuthenticator httpAuth;
- public ArchivaDavSessionProvider( ServletAuthenticator servletAuth, HttpAuthenticator httpAuth )
+ private ArchivaXworkUser archivaXworkUser;
+
+ public ArchivaDavSessionProvider( ServletAuthenticator servletAuth, HttpAuthenticator httpAuth, ArchivaXworkUser archivaXworkUser )
{
this.servletAuth = servletAuth;
this.httpAuth = httpAuth;
+ this.archivaXworkUser = archivaXworkUser;
}
public boolean attachSession( WebdavRequest request )
}
catch ( AuthenticationException e )
{
- throw new UnauthorizedDavException( repositoryId, "You are not authenticated" );
+ boolean isPut = WebdavMethodUtil.isWriteMethod( request.getMethod() );
+
+ // safety check for MRM-911
+ String guest = archivaXworkUser.getGuest();
+ try
+ {
+ if( servletAuth.isAuthorized( guest,
+ ( ( ArchivaDavResourceLocator ) request.getRequestLocator() ).getRepositoryId(), isPut ) )
+ {
+ request.setDavSession(new ArchivaDavSession());
+ return true;
+ }
+ }
+ catch ( UnauthorizedException ae )
+ {
+ throw new UnauthorizedDavException( repositoryId,
+ "You are not authenticated and authorized to access any repository." );
+ }
+
+ throw new UnauthorizedDavException( repositoryId, "You are not authenticated." );
}
catch ( MustChangePasswordException e )
{
import org.apache.maven.archiva.configuration.ConfigurationEvent;
import org.apache.maven.archiva.configuration.ConfigurationListener;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.security.ArchivaXworkUser;
import org.apache.maven.archiva.security.ServletAuthenticator;
import org.codehaus.plexus.redback.xwork.filter.authentication.HttpAuthenticator;
import org.codehaus.plexus.spring.PlexusToSpringUtils;
HttpAuthenticator httpAuth =
(HttpAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( HttpAuthenticator.ROLE, "basic" ) );
- sessionProvider = new ArchivaDavSessionProvider( servletAuth, httpAuth );
+ ArchivaXworkUser archivaXworkUser =
+ (ArchivaXworkUser) wac.getBean( PlexusToSpringUtils.buildSpringId( ArchivaXworkUser.class.getName() ) );
+ sessionProvider = new ArchivaDavSessionProvider( servletAuth, httpAuth, archivaXworkUser );
}
public void configurationEvent( ConfigurationEvent event )
throws Exception
{
super.setUp();
- sessionProvider = new ArchivaDavSessionProvider(new ServletAuthenticatorMock(), new HttpAuthenticatorMock());
+ sessionProvider = new ArchivaDavSessionProvider(new ServletAuthenticatorMock(), new HttpAuthenticatorMock(), null);
request = new WebdavRequestImpl(new HttpServletRequestMock(), null);
}
return true;
}
- public boolean isAuthorizedToAccessVirtualRepository(String arg0, String arg1)
+ public boolean isAuthorized(String arg0, String arg1, boolean isWriteRequest)
throws UnauthorizedException
{
return true;
*/
import java.io.File;
-import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.lock.SimpleLockManager;
import org.apache.jackrabbit.webdav.lock.Type;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.repository.audit.AuditListener;
import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
+import org.apache.maven.archiva.security.ArchivaXworkUser;
import org.apache.maven.archiva.webdav.util.MimeTypes;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.spring.PlexusToSpringUtils;
-import org.easymock.MockControl;
import edu.emory.mathcs.backport.java.util.Collections;
private RepositoryContentConsumers consumers;
private ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
+
+ private ArchivaXworkUser archivaXworkUser;
@Override
protected void setUp()
lockManager = new SimpleLockManager();
resource.addLockManager(lockManager);
consumers = new RepositoryContentConsumers();
+ archivaXworkUser = (ArchivaXworkUser) getApplicationContext().getBean( PlexusToSpringUtils.buildSpringId( ArchivaXworkUser.class ) );
}
@Override
private DavResource getDavResource(String logicalPath, File file)
{
return new ArchivaDavResource( file.getAbsolutePath(), logicalPath, repository, session, resourceLocator,
- resourceFactory, mimeTypes, Collections.emptyList(), consumers );
+ resourceFactory, mimeTypes, Collections.emptyList(), consumers, archivaXworkUser );
}
public void testDeleteNonExistantResourceShould404()
public DavResource createResource(DavResourceLocator locator, DavSession session) throws DavException {
return new ArchivaDavResource( baseDir.getAbsolutePath(), "/", repository, session, resourceLocator,
- resourceFactory, mimeTypes, Collections.emptyList(), consumers );
+ resourceFactory, mimeTypes, Collections.emptyList(), consumers, archivaXworkUser );
}
}
}
package org.apache.maven.archiva.webdav;
-/**
- * RepositoryServletSecurityTest
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
*
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+
+import javax.servlet.http.HttpServletResponse;
+
+import net.sf.ehcache.CacheManager;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.webdav.DavResourceFactory;
+import org.apache.jackrabbit.webdav.DavSessionProvider;
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.security.ArchivaXworkUser;
+import org.apache.maven.archiva.security.ServletAuthenticator;
+import org.codehaus.plexus.redback.authentication.AuthenticationException;
+import org.codehaus.plexus.redback.authentication.AuthenticationResult;
+import org.codehaus.plexus.redback.authorization.UnauthorizedException;
+import org.codehaus.plexus.redback.system.DefaultSecuritySession;
+import org.codehaus.plexus.redback.system.SecuritySession;
+import org.codehaus.plexus.redback.xwork.filter.authentication.HttpAuthenticator;
+import org.codehaus.plexus.redback.xwork.filter.authentication.basic.HttpBasicAuthentication;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import org.easymock.MockControl;
+import org.easymock.classextension.MockClassControl;
+import org.easymock.internal.AlwaysMatcher;
+
+import com.meterware.httpunit.GetMethodWebRequest;
+import com.meterware.httpunit.HttpUnitOptions;
+import com.meterware.httpunit.PutMethodWebRequest;
+import com.meterware.httpunit.WebRequest;
+import com.meterware.httpunit.WebResponse;
+import com.meterware.servletunit.InvocationContext;
+import com.meterware.servletunit.ServletRunner;
+import com.meterware.servletunit.ServletUnitClient;
+
+/**
+ * RepositoryServletSecurityTest
+ *
+ * Test the flow of the authentication and authorization checks. This does not necessarily
+ * perform redback security checking.
+ *
* @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class RepositoryServletSecurityTest
- extends AbstractRepositoryServletTestCase
+ extends PlexusInSpringTestCase
{
- public void testSecuredGet()
+ protected static final String REPOID_INTERNAL = "internal";
+
+ protected ServletUnitClient sc;
+
+ protected File repoRootInternal;
+
+ private ServletRunner sr;
+
+ protected ArchivaConfiguration archivaConfiguration;
+
+ private DavSessionProvider davSessionProvider;
+
+ private MockControl servletAuthControl;
+
+ private ServletAuthenticator servletAuth;
+
+ private MockClassControl httpAuthControl;
+
+ private HttpAuthenticator httpAuth;
+
+ private ArchivaXworkUser archivaXworkUser;
+
+ private RepositoryServlet servlet;
+
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ String appserverBase = getTestFile( "target/appserver-base" ).getAbsolutePath();
+ System.setProperty( "appserver.base", appserverBase );
+
+ File testConf = getTestFile( "src/test/resources/repository-archiva.xml" );
+ File testConfDest = new File( appserverBase, "conf/archiva.xml" );
+ FileUtils.copyFile( testConf, testConfDest );
+
+ archivaConfiguration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class );
+ repoRootInternal = new File( appserverBase, "data/repositories/internal" );
+ Configuration config = archivaConfiguration.getConfiguration();
+
+ config.addManagedRepository( createManagedRepository( REPOID_INTERNAL, "Internal Test Repo", repoRootInternal ) );
+ saveConfiguration( archivaConfiguration );
+
+ CacheManager.getInstance().removeCache( "url-failures-cache" );
+
+ HttpUnitOptions.setExceptionsThrownOnErrorStatus( false );
+
+ sr = new ServletRunner( getTestFile( "src/test/resources/WEB-INF/repository-servlet-security-test/web.xml" ) );
+ sr.registerServlet( "/repository/*", RepositoryServlet.class.getName() );
+ sc = sr.newClient();
+
+ servletAuthControl = MockControl.createControl( ServletAuthenticator.class );
+ servletAuthControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
+ servletAuth = (ServletAuthenticator) servletAuthControl.getMock();
+
+ httpAuthControl =
+ MockClassControl.createControl( HttpBasicAuthentication.class, HttpBasicAuthentication.class.getMethods() );
+ httpAuthControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
+ httpAuth = (HttpAuthenticator) httpAuthControl.getMock();
+
+ archivaXworkUser = new ArchivaXworkUser();
+ archivaXworkUser.setGuest( "guest" );
+
+ davSessionProvider = new ArchivaDavSessionProvider( servletAuth, httpAuth, archivaXworkUser );
+ }
+
+ protected ManagedRepositoryConfiguration createManagedRepository( String id, String name, File location )
{
+ ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
+ repo.setId( id );
+ repo.setName( name );
+ repo.setLocation( location.getAbsolutePath() );
+ return repo;
+ }
+
+ protected void saveConfiguration()
+ throws Exception
+ {
+ saveConfiguration( archivaConfiguration );
+ }
+
+ protected void saveConfiguration( ArchivaConfiguration archivaConfiguration )
+ throws Exception
+ {
+ archivaConfiguration.save( archivaConfiguration.getConfiguration() );
+ }
+
+ protected void setupCleanRepo( File repoRootDir )
+ throws IOException
+ {
+ FileUtils.deleteDirectory( repoRootDir );
+ if ( !repoRootDir.exists() )
+ {
+ repoRootDir.mkdirs();
+ }
+ }
+
+ @Override
+ protected String getPlexusConfigLocation()
+ {
+ return "org/apache/maven/archiva/webdav/RepositoryServletSecurityTest.xml";
+ }
+
+ @Override
+ protected void tearDown()
+ throws Exception
+ {
+ if ( sc != null )
+ {
+ sc.clearContents();
+ }
+
+ if ( sr != null )
+ {
+ sr.shutDown();
+ }
+
+ if ( repoRootInternal.exists() )
+ {
+ FileUtils.deleteDirectory(repoRootInternal);
+ }
+
+ servlet = null;
+ super.tearDown();
}
-
- public void testSecuredBrowse()
+
+    // Deploy (PUT) with an invalid user while the 'guest' account has no write access
+    // to the repository: a 401 (Unauthorized) response must be returned.
+ public void testPutWithInvalidUserAndGuestHasNoWriteAccess()
+ throws Exception
{
+ setupCleanRepo( repoRootInternal );
+
+ String putUrl = "http://machine.com/repository/internal/path/to/artifact.jar";
+ InputStream is = getClass().getResourceAsStream( "/artifact.jar" );
+ assertNotNull( "artifact.jar inputstream", is );
+
+ WebRequest request = new PutMethodWebRequest( putUrl, is, "application/octet-stream" );
+ InvocationContext ic = sc.newInvocation( request );
+ servlet = (RepositoryServlet) ic.getServlet();
+ servlet.setDavSessionProvider( davSessionProvider );
+
+ AuthenticationResult result = new AuthenticationResult();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ servletAuthControl.expectAndThrow( servletAuth.isAuthenticated( null, null ),
+ new AuthenticationException( "Authentication error" ) );
+
+ servletAuth.isAuthorized( "guest", "internal", true );
+ servletAuthControl.setMatcher( MockControl.EQUALS_MATCHER );
+ servletAuthControl.setThrowable( new UnauthorizedException( "'guest' has no write access to repository" ) );
+
+ httpAuthControl.replay();
+ servletAuthControl.replay();
+
+ servlet.service( ic.getRequest(), ic.getResponse() );
+
+ httpAuthControl.verify();
+ servletAuthControl.verify();
+
+ //assertEquals(HttpServletResponse.SC_UNAUTHORIZED, response.getResponseCode());
+ }
+
+ // test deploy with invalid user, but guest has write access to repo
+ public void testPutWithInvalidUserAndGuestHasWriteAccess()
+ throws Exception
+ {
+ setupCleanRepo( repoRootInternal );
+
+ String putUrl = "http://machine.com/repository/internal/path/to/artifact.jar";
+ InputStream is = getClass().getResourceAsStream( "/artifact.jar" );
+ assertNotNull( "artifact.jar inputstream", is );
+
+ WebRequest request = new PutMethodWebRequest( putUrl, is, "application/octet-stream" );
+
+ InvocationContext ic = sc.newInvocation( request );
+ servlet = (RepositoryServlet) ic.getServlet();
+ servlet.setDavSessionProvider( davSessionProvider );
+
+ ArchivaDavResourceFactory archivaDavResourceFactory = (ArchivaDavResourceFactory) servlet.getResourceFactory();
+ archivaDavResourceFactory.setHttpAuth( httpAuth );
+ archivaDavResourceFactory.setServletAuth( servletAuth );
+
+ servlet.setResourceFactory( archivaDavResourceFactory );
+
+ AuthenticationResult result = new AuthenticationResult();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ servletAuthControl.expectAndThrow( servletAuth.isAuthenticated( null, null ),
+ new AuthenticationException( "Authentication error" ) );
+
+ servletAuth.isAuthorized( "guest", "internal", true );
+ servletAuthControl.setMatcher( MockControl.EQUALS_MATCHER );
+ servletAuthControl.setReturnValue( true );
+
+ // ArchivaDavResourceFactory#isAuthorized()
+ SecuritySession session = new DefaultSecuritySession();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ httpAuthControl.expectAndReturn( httpAuth.getSecuritySession(), session );
+ servletAuthControl.expectAndThrow( servletAuth.isAuthenticated( null, result ),
+ new AuthenticationException( "Authentication error" ) );
+
+ // check if guest has write access
+ servletAuth.isAuthorized( "guest", "internal", true );
+ servletAuthControl.setMatcher( MockControl.EQUALS_MATCHER );
+ servletAuthControl.setReturnValue( true );
+
+ httpAuthControl.replay();
+ servletAuthControl.replay();
+
+ servlet.service( ic.getRequest(), ic.getResponse() );
+
+ httpAuthControl.verify();
+ servletAuthControl.verify();
+
+ // assertEquals( HttpServletResponse.SC_CREATED, response.getResponseCode() );
+ }
+
+ // test deploy with a valid user with no write access
+ public void testPutWithValidUserWithNoWriteAccess()
+ throws Exception
+ {
+ setupCleanRepo( repoRootInternal );
+
+ String putUrl = "http://machine.com/repository/internal/path/to/artifact.jar";
+ InputStream is = getClass().getResourceAsStream( "/artifact.jar" );
+ assertNotNull( "artifact.jar inputstream", is );
+
+ WebRequest request = new PutMethodWebRequest( putUrl, is, "application/octet-stream" );
+
+ InvocationContext ic = sc.newInvocation( request );
+ servlet = (RepositoryServlet) ic.getServlet();
+ servlet.setDavSessionProvider( davSessionProvider );
+
+ ArchivaDavResourceFactory archivaDavResourceFactory = (ArchivaDavResourceFactory) servlet.getResourceFactory();
+ archivaDavResourceFactory.setHttpAuth( httpAuth );
+ archivaDavResourceFactory.setServletAuth( servletAuth );
+ servlet.setResourceFactory( archivaDavResourceFactory );
+
+ AuthenticationResult result = new AuthenticationResult();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, null ), true );
+
+ // ArchivaDavResourceFactory#isAuthorized()
+ SecuritySession session = new DefaultSecuritySession();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ httpAuthControl.expectAndReturn( httpAuth.getSecuritySession(), session );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, result ), true );
+ servletAuthControl.expectAndThrow( servletAuth.isAuthorized( null, session, "internal", true ),
+ new UnauthorizedException( "User not authorized" ) );
+
+ httpAuthControl.replay();
+ servletAuthControl.replay();
+
+ servlet.service( ic.getRequest(), ic.getResponse() );
+
+ httpAuthControl.verify();
+ servletAuthControl.verify();
+
+ // assertEquals(HttpServletResponse.SC_UNAUTHORIZED, response.getResponseCode());
+ }
+
+ // test deploy with a valid user with write access
+ public void testPutWithValidUserWithWriteAccess()
+ throws Exception
+ {
+ setupCleanRepo( repoRootInternal );
+ assertTrue( repoRootInternal.exists() );
+
+ String putUrl = "http://machine.com/repository/internal/path/to/artifact.jar";
+ InputStream is = getClass().getResourceAsStream( "/artifact.jar" );
+ assertNotNull( "artifact.jar inputstream", is );
+
+ WebRequest request = new PutMethodWebRequest( putUrl, is, "application/octet-stream" );
+
+ InvocationContext ic = sc.newInvocation( request );
+ servlet = (RepositoryServlet) ic.getServlet();
+ servlet.setDavSessionProvider( davSessionProvider );
+
+ ArchivaDavResourceFactory archivaDavResourceFactory = (ArchivaDavResourceFactory) servlet.getResourceFactory();
+ archivaDavResourceFactory.setHttpAuth( httpAuth );
+ archivaDavResourceFactory.setServletAuth( servletAuth );
+
+ servlet.setResourceFactory( archivaDavResourceFactory );
+
+ AuthenticationResult result = new AuthenticationResult();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, null ), true );
+
+ // ArchivaDavResourceFactory#isAuthorized()
+ SecuritySession session = new DefaultSecuritySession();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ httpAuthControl.expectAndReturn( httpAuth.getSecuritySession(), session );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, result ), true );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthorized( null, session, "internal", true ), true );
+
+ httpAuthControl.replay();
+ servletAuthControl.replay();
+
+ servlet.service( ic.getRequest(), ic.getResponse() );
+
+ httpAuthControl.verify();
+ servletAuthControl.verify();
+
+ // assertEquals(HttpServletResponse.SC_CREATED, response.getResponseCode());
+ }
+
+    // GET with an invalid user while 'guest' has read access to the repository:
+    // the request falls back to the guest account and succeeds with 200 OK.
+ public void testGetWithInvalidUserAndGuestHasReadAccess()
+ throws Exception
+ {
+ String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
+ String expectedArtifactContents = "dummy-commons-lang-artifact";
+
+ File artifactFile = new File( repoRootInternal, commonsLangJar );
+ artifactFile.getParentFile().mkdirs();
+
+ FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, null );
+
+ WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangJar );
+ InvocationContext ic = sc.newInvocation( request );
+ servlet = (RepositoryServlet) ic.getServlet();
+ servlet.setDavSessionProvider( davSessionProvider );
+
+ ArchivaDavResourceFactory archivaDavResourceFactory = (ArchivaDavResourceFactory) servlet.getResourceFactory();
+ archivaDavResourceFactory.setHttpAuth( httpAuth );
+ archivaDavResourceFactory.setServletAuth( servletAuth );
+
+ servlet.setResourceFactory( archivaDavResourceFactory );
+
+ AuthenticationResult result = new AuthenticationResult();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ servletAuthControl.expectAndThrow( servletAuth.isAuthenticated( null, null ),
+ new AuthenticationException( "Authentication error" ) );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthorized( "guest", "internal", false ), true );
+
+ // ArchivaDavResourceFactory#isAuthorized()
+ SecuritySession session = new DefaultSecuritySession();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ httpAuthControl.expectAndReturn( httpAuth.getSecuritySession(), session );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, result ), true );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthorized( null, session, "internal", true ), true );
+
+ httpAuthControl.replay();
+ servletAuthControl.replay();
+
+ WebResponse response = sc.getResponse( request );
+
+ httpAuthControl.verify();
+ servletAuthControl.verify();
+
+ assertEquals( HttpServletResponse.SC_OK, response.getResponseCode() );
+ assertEquals( "Expected file contents", expectedArtifactContents, response.getText() );
+ }
+
+ // test get with invalid user, and guest has no read access to repo
+ public void testGetWithInvalidUserAndGuestHasNoReadAccess()
+ throws Exception
+ {
+ String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
+ String expectedArtifactContents = "dummy-commons-lang-artifact";
+
+ File artifactFile = new File( repoRootInternal, commonsLangJar );
+ artifactFile.getParentFile().mkdirs();
+
+ FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, null );
+
+ WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangJar );
+ InvocationContext ic = sc.newInvocation( request );
+ servlet = (RepositoryServlet) ic.getServlet();
+ servlet.setDavSessionProvider( davSessionProvider );
+
+ AuthenticationResult result = new AuthenticationResult();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ servletAuthControl.expectAndThrow( servletAuth.isAuthenticated( null, null ),
+ new AuthenticationException( "Authentication error" ) );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthorized( "guest", "internal", false ), false );
+
+ httpAuthControl.replay();
+ servletAuthControl.replay();
+
+ WebResponse response = sc.getResponse( request );
+
+ httpAuthControl.verify();
+ servletAuthControl.verify();
+
+ assertEquals( HttpServletResponse.SC_UNAUTHORIZED, response.getResponseCode() );
+ }
+
+ // test get with valid user with read access to repo
+ public void testGetWithAValidUserWithReadAccess()
+ throws Exception
+ {
+ String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
+ String expectedArtifactContents = "dummy-commons-lang-artifact";
+
+ File artifactFile = new File( repoRootInternal, commonsLangJar );
+ artifactFile.getParentFile().mkdirs();
+
+ FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, null );
+
+ WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangJar );
+ InvocationContext ic = sc.newInvocation( request );
+ servlet = (RepositoryServlet) ic.getServlet();
+ servlet.setDavSessionProvider( davSessionProvider );
+
+ ArchivaDavResourceFactory archivaDavResourceFactory = (ArchivaDavResourceFactory) servlet.getResourceFactory();
+ archivaDavResourceFactory.setHttpAuth( httpAuth );
+ archivaDavResourceFactory.setServletAuth( servletAuth );
+
+ servlet.setResourceFactory( archivaDavResourceFactory );
+
+ AuthenticationResult result = new AuthenticationResult();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, null ), true );
+
+ // ArchivaDavResourceFactory#isAuthorized()
+ SecuritySession session = new DefaultSecuritySession();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ httpAuthControl.expectAndReturn( httpAuth.getSecuritySession(), session );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, result ), true );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthorized( null, session, "internal", true ), true );
+
+ httpAuthControl.replay();
+ servletAuthControl.replay();
+
+ WebResponse response = sc.getResponse( request );
+
+ httpAuthControl.verify();
+ servletAuthControl.verify();
+
+ assertEquals( HttpServletResponse.SC_OK, response.getResponseCode() );
+ assertEquals( "Expected file contents", expectedArtifactContents, response.getText() );
+ }
+
+ // test get with valid user with no read access to repo
+ public void testGetWithAValidUserWithNoReadAccess()
+ throws Exception
+ {
+ String commonsLangJar = "commons-lang/commons-lang/2.1/commons-lang-2.1.jar";
+ String expectedArtifactContents = "dummy-commons-lang-artifact";
+
+ File artifactFile = new File( repoRootInternal, commonsLangJar );
+ artifactFile.getParentFile().mkdirs();
+
+ FileUtils.writeStringToFile( artifactFile, expectedArtifactContents, null );
+
+ WebRequest request = new GetMethodWebRequest( "http://machine.com/repository/internal/" + commonsLangJar );
+ InvocationContext ic = sc.newInvocation( request );
+ servlet = (RepositoryServlet) ic.getServlet();
+ servlet.setDavSessionProvider( davSessionProvider );
+
+ ArchivaDavResourceFactory archivaDavResourceFactory = (ArchivaDavResourceFactory) servlet.getResourceFactory();
+ archivaDavResourceFactory.setHttpAuth( httpAuth );
+ archivaDavResourceFactory.setServletAuth( servletAuth );
+
+ servlet.setResourceFactory( archivaDavResourceFactory );
+
+ AuthenticationResult result = new AuthenticationResult();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, null ), true );
+
+ // ArchivaDavResourceFactory#isAuthorized()
+ SecuritySession session = new DefaultSecuritySession();
+ httpAuthControl.expectAndReturn( httpAuth.getAuthenticationResult( null, null ), result );
+ httpAuthControl.expectAndReturn( httpAuth.getSecuritySession(), session );
+ servletAuthControl.expectAndReturn( servletAuth.isAuthenticated( null, result ), true );
+ servletAuthControl.expectAndThrow( servletAuth.isAuthorized( null, session, "internal", true ),
+ new UnauthorizedException( "User not authorized to read repository." ) );
+
+ httpAuthControl.replay();
+ servletAuthControl.replay();
+
+ WebResponse response = sc.getResponse( request );
+
+ httpAuthControl.verify();
+ servletAuthControl.verify();
+ assertEquals( HttpServletResponse.SC_UNAUTHORIZED, response.getResponseCode() );
}
}
{
public UnauthenticatedDavSessionProvider()
{
- super(null, null);
+ super(null, null, null);
}
@Override
--- /dev/null
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<web-app xmlns="http://java.sun.com/xml/ns/j2ee" version="2.4"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd">
+
+ <display-name>Apache Archiva</display-name>
+
+ <listener>
+ <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
+ </listener>
+
+ <context-param>
+ <param-name>contextClass</param-name>
+ <param-value>org.codehaus.plexus.spring.PlexusWebApplicationContext</param-value>
+ </context-param>
+
+ <context-param>
+ <param-name>contextConfigLocation</param-name>
+ <param-value>
+ classpath*:/META-INF/plexus/components.xml
+ classpath*:/META-INF/spring-context.xml
+ target/test-classes/org/apache/maven/archiva/webdav/RepositoryServletSecurityTest.xml
+ </param-value>
+ </context-param>
+
+</web-app>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.webdav.DefaultDavServerManager</implementation>
<description>DefaultDavServerManager</description>
- <configuration>
- <provider-hint>proxied</provider-hint>
- </configuration>
+ <requirements>
+ <requirement>
+ <role>org.apache.maven.archiva.webdav.DavServerComponent</role>
+ <role-hint>proxied</role-hint>
+ </requirement>
+ </requirements>
</component>
<component>
<component>
<role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<role-hint>default</role-hint>
- <implementation>org.apache.maven.archiva.web.repository.StubRepositoryContentConsumers</implementation>
+ <implementation>org.apache.maven.archiva.webdav.StubRepositoryContentConsumers</implementation>
</component>
-
- <!-- TODO: shouldn't need so many components just to use in-memory - is flaky since these are auto-generated -->
+
<component>
<role>org.codehaus.plexus.redback.system.SecuritySystem</role>
<role-hint>default</role-hint>
<implementation>org.codehaus.plexus.redback.system.DefaultSecuritySystem</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.redback.authentication.AuthenticationManager</role>
- <field-name>authnManager</field-name>
- </requirement>
- <requirement>
- <role>org.codehaus.plexus.redback.authorization.Authorizer</role>
- <role-hint>rbac</role-hint>
- <field-name>authorizer</field-name>
- </requirement>
- <requirement>
- <role>org.codehaus.plexus.redback.users.UserManager</role>
- <role-hint>memory</role-hint>
- <field-name>userManager</field-name>
- </requirement>
- <requirement>
- <role>org.codehaus.plexus.redback.keys.KeyManager</role>
- <role-hint>memory</role-hint>
- <field-name>keyManager</field-name>
- </requirement>
- <requirement>
- <role>org.codehaus.plexus.redback.policy.UserSecurityPolicy</role>
- <field-name>policy</field-name>
- </requirement>
- </requirements>
</component>
-
- <component>
- <role>org.codehaus.plexus.redback.authentication.Authenticator</role>
- <role-hint>user-manager</role-hint>
- <implementation>org.codehaus.plexus.redback.authentication.users.UserManagerAuthenticator</implementation>
+
+ <component>
+ <role>org.apache.maven.archiva.webdav.ArchivaDavResourceFactory</role>
+ <implementation>org.apache.maven.archiva.webdav.ArchivaDavResourceFactory</implementation>
<requirements>
<requirement>
- <role>org.codehaus.plexus.redback.users.UserManager</role>
- <role-hint>memory</role-hint>
- <field-name>userManager</field-name>
+ <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
+ <field-name>archivaConfiguration</field-name>
</requirement>
<requirement>
- <role>org.codehaus.plexus.redback.policy.UserSecurityPolicy</role>
- <field-name>securityPolicy</field-name>
- </requirement>
- </requirements>
- </component>
-
- <component>
- <role>org.codehaus.plexus.redback.authentication.Authenticator</role>
- <role-hint>keystore</role-hint>
- <implementation>org.codehaus.plexus.redback.authentication.keystore.KeyStoreAuthenticator</implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.redback.keys.KeyManager</role>
- <role-hint>memory</role-hint>
- <field-name>keystore</field-name>
- </requirement>
- <requirement>
- <role>org.codehaus.plexus.redback.users.UserManager</role>
- <role-hint>memory</role-hint>
- <field-name>userManager</field-name>
- </requirement>
- </requirements>
- </component>
-
- <component>
- <role>org.codehaus.plexus.redback.authorization.rbac.evaluator.PermissionEvaluator</role>
- <role-hint>default</role-hint>
- <implementation>org.codehaus.plexus.redback.authorization.rbac.evaluator.DefaultPermissionEvaluator
- </implementation>
- <requirements>
- <requirement>
- <role>org.codehaus.plexus.redback.users.UserManager</role>
- <role-hint>memory</role-hint>
- <field-name>userManager</field-name>
- </requirement>
- </requirements>
- </component>
-
- <component>
- <role>org.codehaus.plexus.redback.authorization.Authorizer</role>
- <role-hint>rbac</role-hint>
- <implementation>org.codehaus.plexus.redback.authorization.rbac.RbacAuthorizer</implementation>
- <requirements>
+ <role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
+ <field-name>repositoryFactory</field-name>
+ </requirement>
<requirement>
- <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
- <role-hint>memory</role-hint>
- <field-name>manager</field-name>
+ <role>org.apache.maven.archiva.repository.content.RepositoryRequest</role>
+ <field-name>repositoryRequest</field-name>
</requirement>
<requirement>
- <role>org.codehaus.plexus.redback.users.UserManager</role>
- <role-hint>memory</role-hint>
- <field-name>userManager</field-name>
+ <role>org.apache.maven.archiva.proxy.RepositoryProxyConnectors</role>
+ <field-name>connectors</field-name>
</requirement>
<requirement>
- <role>org.codehaus.plexus.redback.authorization.rbac.evaluator.PermissionEvaluator</role>
- <role-hint>default</role-hint>
- <field-name>evaluator</field-name>
+ <role>org.apache.maven.archiva.repository.metadata.MetadataTools</role>
+ <field-name>metadataTools</field-name>
</requirement>
- </requirements>
- </component>
-
- <component>
- <role>org.codehaus.plexus.redback.role.RoleManager</role>
- <role-hint>default</role-hint>
- <implementation>org.codehaus.plexus.redback.role.DefaultRoleManager</implementation>
- <instantiation-strategy>singleton</instantiation-strategy>
- <requirements>
<requirement>
- <role>org.codehaus.plexus.redback.role.merger.RoleModelMerger</role>
- <role-hint>default</role-hint>
- <field-name>modelMerger</field-name>
+ <role>org.apache.maven.archiva.security.ServletAuthenticator</role>
+ <field-name>servletAuth</field-name>
</requirement>
<requirement>
- <role>org.codehaus.plexus.redback.role.validator.RoleModelValidator</role>
- <role-hint>default</role-hint>
- <field-name>modelValidator</field-name>
+ <role>org.apache.maven.archiva.webdav.util.MimeTypes</role>
+ <field-name>mimeTypes</field-name>
</requirement>
<requirement>
- <role>org.codehaus.plexus.redback.role.processor.RoleModelProcessor</role>
- <role-hint>default</role-hint>
- <field-name>modelProcessor</field-name>
+ <role>org.codehaus.plexus.redback.xwork.filter.authentication.HttpAuthenticator</role>
+ <role-hint>basic</role-hint>
+ <field-name>httpAuth</field-name>
</requirement>
<requirement>
- <role>org.codehaus.plexus.redback.role.template.RoleTemplateProcessor</role>
+ <role>org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers</role>
<role-hint>default</role-hint>
- <field-name>templateProcessor</field-name>
</requirement>
<requirement>
- <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
- <role-hint>memory</role-hint>
- <field-name>rbacManager</field-name>
+ <role>org.codehaus.plexus.digest.ChecksumFile</role>
+ <field-name>checksum</field-name>
</requirement>
<requirement>
- <role>org.codehaus.plexus.PlexusContainer</role>
- <field-name>container</field-name>
+ <role>org.codehaus.plexus.digest.Digester</role>
+ <role-hint>sha1</role-hint>
+ <field-name>digestSha1</field-name>
</requirement>
- </requirements>
- </component>
-
- <component>
- <role>org.codehaus.plexus.redback.role.processor.RoleModelProcessor</role>
- <role-hint>default</role-hint>
- <implementation>org.codehaus.plexus.redback.role.processor.DefaultRoleModelProcessor</implementation>
- <requirements>
<requirement>
- <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
- <role-hint>memory</role-hint>
- <field-name>rbacManager</field-name>
+ <role>org.codehaus.plexus.digest.Digester</role>
+ <role-hint>md5</role-hint>
+ <field-name>digestMd5</field-name>
</requirement>
- </requirements>
- </component>
-
- <component>
- <role>org.codehaus.plexus.redback.role.template.RoleTemplateProcessor</role>
- <role-hint>default</role-hint>
- <implementation>org.codehaus.plexus.redback.role.template.DefaultRoleTemplateProcessor</implementation>
- <requirements>
<requirement>
- <role>org.codehaus.plexus.redback.rbac.RBACManager</role>
- <role-hint>memory</role-hint>
- <field-name>rbacManager</field-name>
- </requirement>
+ <role>org.apache.maven.archiva.security.ArchivaXworkUser</role>
+ <field-name>archivaXworkUser</field-name>
+ </requirement>
</requirements>
</component>
</components>
<role>org.codehaus.plexus.digest.Digester</role>
<role-hint>md5</role-hint>
<field-name>digestMd5</field-name>
+ </requirement>
+ <requirement>
+ <role>org.apache.maven.archiva.security.ArchivaXworkUser</role>
+ <field-name>archivaXworkUser</field-name>
</requirement>
</requirements>
</component>
<module>archiva-webapp</module>
<module>archiva-webdav</module>
<module>archiva-rss</module>
- <module>archiva-xmlrpc</module>
</modules>
<profiles>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.1.0 http://maven.apache.org/maven-v4.1.0.xsd">
- <modelVersion>4.1.0</modelVersion>
-
- <!--
-
- Requires the Maven 2.0.9-terse branch build to build!
-
- -->
-
- <parent groupId="org.apache.maven.archiva" artifactId="archiva-parent" version="2"
- relativePath="../archiva-parent/pom.xml"/>
- <name>Archiva</name>
- <artifactId>archiva</artifactId>
- <packaging>pom</packaging>
- <version>1.1-SNAPSHOT</version>
- <scm connection="scm:svn:http://svn.apache.org/repos/asf/archiva/trunk"
- developerConnection="scm:svn:https://svn.apache.org/repos/asf/archiva/trunk"
- url="http://svn.apache.org/viewcvs.cgi/archiva/trunk"/>
-
- <url>http://maven.apache.org/archiva/</url>
- <build>
- <plugins>
- <plugin groupId="org.codehaus.plexus" artifactId="plexus-maven-plugin" version="1.3.5">
- <executions>
- <execution id="generate">
- <goals>
- <goal>descriptor</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <!-- Doesn't work on Mac with Java 6 yet
- <plugin>
- <artifactId>maven-enforcer-plugin</artifactId>
- <executions>
- <execution>
- <id>enforce-java</id>
- <goals>
- <goal>enforce-once</goal>
- </goals>
- <configuration>
- <rules>
- <requireJavaVersion>
- <version>1.5.0</version>
- </requireJavaVersion>
- </rules>
- </configuration>
- </execution>
- </executions>
- </plugin>
- -->
- <plugin artifactId="maven-compiler-plugin">
- <configuration>
- <source>1.5</source>
- <target>1.5</target>
- </configuration>
- </plugin>
- <plugin artifactId="maven-idea-plugin">
- <configuration>
- <jdkLevel>1.5</jdkLevel>
- </configuration>
- </plugin>
- <plugin artifactId="maven-surefire-plugin" version="2.2"/>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-remote-resources-plugin" version="1.0-alpha-6">
- <executions>
- <execution>
- <goals>
- <goal>process</goal>
- </goals>
- <configuration>
- <resourceBundles>
- <resourceBundle>org.apache:apache-jar-resource-bundle:1.3</resourceBundle>
- </resourceBundles>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- <pluginManagement>
- <plugins>
- <plugin artifactId="maven-idea-plugin">
- <configuration>
- <jdkLevel>1.4</jdkLevel>
- </configuration>
- </plugin>
- <plugin artifactId="maven-release-plugin">
- <configuration>
- <tagBase>https://svn.apache.org/repos/asf/archiva/tags</tagBase>
- <preparationGoals>clean install</preparationGoals>
- </configuration>
- </plugin>
- <plugin groupId="org.codehaus.modello" artifactId="modello-maven-plugin" version="1.0-alpha-15"/>
- </plugins>
- </pluginManagement>
- </build>
- <modules>
- <module>archiva-base</module>
- <module>archiva-database</module>
- <module>archiva-reporting</module>
- <module>archiva-scheduled</module>
- <module>archiva-web</module>
- <module>archiva-cli</module>
- <module>archiva-docs</module>
- </modules>
- <dependencies>
- <dependency groupId="junit" artifactId="junit" version="3.8.1" scope="test"/>
- <dependency groupId="easymock" artifactId="easymock" version="1.2_Java1.3" scope="test"/>
- </dependencies>
- <dependencyManagement>
- <dependencies>
- <dependency groupId="javax.activation" artifactId="activation" version="1.1"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-applet" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-artifact-converter" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-artifact-reports" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-common" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-configuration" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-consumer-api" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-converter" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-dependency-graph" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-core" version="${pom.version}"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-core-consumers" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-database" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-database-consumers" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-indexer" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-lucene-consumers" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-model" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-plexus-application" version="1.1-SNAPSHOT"
- type="plexus-application"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-policies" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-proxy" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-report-manager" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-repository-layer" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-scheduled" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-security" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-signature-consumers" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-transaction" version="1.1-SNAPSHOT"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-docs" version="1.1-SNAPSHOT" type="zip"
- classifier="docs"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-webapp" version="1.1-SNAPSHOT" type="war"/>
- <dependency groupId="org.apache.maven.archiva" artifactId="archiva-xml-tools" version="1.1-SNAPSHOT"/>
- <dependency groupId="commons-collections" artifactId="commons-collections" version="3.2"/>
- <dependency groupId="commons-io" artifactId="commons-io" version="1.2"/>
- <dependency groupId="commons-lang" artifactId="commons-lang" version="2.2"/>
- <dependency groupId="commons-logging" artifactId="commons-logging-api" version="1.0.4"/>
- <dependency groupId="org.apache.derby" artifactId="derby" version="10.1.3.1"/>
- <dependency groupId="org.apache.derby" artifactId="derbytools" version="10.1.3.1"/>
- <dependency groupId="dom4j" artifactId="dom4j" version="1.6.1"/>
- <dependency groupId="hsqldb" artifactId="hsqldb" version="1.8.0.7"/>
- <dependency groupId="jaxen" artifactId="jaxen" version="1.1">
- <exclusions>
- <exclusion groupId="xerces" artifactId="xercesImpl"/>
- </exclusions>
- </dependency>
- <dependency groupId="javax.jdo" artifactId="jdo2-api" version="2.0"/>
- <dependency groupId="jdom" artifactId="jdom" version="1.0"/>
- <dependency groupId="jpox" artifactId="jpox" version="1.1.9">
- <exclusions>
- <exclusion groupId="javax.transaction" artifactId="jta"/>
- </exclusions>
- </dependency>
- <dependency groupId="javax.servlet" artifactId="jsp-api" version="2.0" scope="provided"/>
- <dependency groupId="javax.servlet" artifactId="jstl" version="1.1.2"/>
- <dependency groupId="log4j" artifactId="log4j" version="1.2.8"/>
- <dependency groupId="org.apache.lucene" artifactId="lucene-core" version="2.0.0"/>
- <dependency groupId="javax.mail" artifactId="mail" version="1.4"/>
- <dependency groupId="org.apache.maven" artifactId="maven-artifact" version="${maven.version}"/>
- <dependency groupId="org.apache.maven" artifactId="maven-artifact-manager" version="${maven.version}"/>
- <dependency groupId="org.apache.maven" artifactId="maven-model" version="${maven.version}"/>
- <dependency groupId="org.apache.maven" artifactId="maven-project" version="${maven.version}"/>
- <dependency groupId="org.apache.maven" artifactId="maven-repository-metadata" version="${maven.version}"/>
- <dependency groupId="org.apache.maven.shared" artifactId="maven-app-configuration-model" version="1.0"/>
- <dependency groupId="org.apache.maven.shared" artifactId="maven-app-configuration-web" version="1.0">
- <exclusions>
- <exclusion groupId="org.apache.maven.wagon" artifactId="wagon-http-lightweight"/>
- </exclusions>
- </dependency>
- <dependency groupId="org.apache.maven.shared" artifactId="maven-model-converter" version="2.1"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-appserver-host" version="2.0-alpha-8"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-appserver-service-jetty" version="2.0-alpha-8"
- type="plexus-service"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-cli" version="1.1"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-command-line" version="1.0-alpha-2"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-component-api" version="1.0-alpha-22"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-container-default" version="1.0-alpha-22"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-digest" version="1.1"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-expression-evaluator" version="1.0-rc1"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-i18n" version="1.0-beta-6"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-jdo2" version="1.0-alpha-8"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-naming" version="1.0-alpha-3"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-quartz" version="1.0-alpha-3"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-slf4j-logging" version="1.1-alpha-1">
- <exclusions>
- <exclusion groupId="org.slf4j" artifactId="slf4j-simple"/>
- </exclusions>
- </dependency>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-taskqueue" version="1.0-alpha-6"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-utils" version="1.4"/>
- <dependency groupId="org.codehaus.plexus" artifactId="plexus-xwork-integration" version="1.0-alpha-7">
- <exclusions>
- <exclusion groupId="velocity" artifactId="velocity-dep"/>
- </exclusions>
- </dependency>
- <dependency groupId="org.codehaus.plexus.cache" artifactId="plexus-cache-api" version="1.0-alpha-2"/>
- <dependency groupId="org.codehaus.plexus.cache" artifactId="plexus-cache-ehcache" version="1.0-alpha-2"/>
- <dependency groupId="net.sf.ehcache" artifactId="ehcache" version="1.3.0"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-rbac-memory" version="${redback.version}"
- scope="test"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-users-memory" version="${redback.version}"
- scope="test"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-keys-memory" version="${redback.version}"
- scope="test"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-rbac-model" version="${redback.version}"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-authorization-rbac"
- version="${redback.version}"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-rbac-role-manager"
- version="${redback.version}"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-system" version="${redback.version}"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-taglib" version="${redback.version}"/>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-xwork-content" version="${redback.version}"
- type="war">
- <exclusions>
- <exclusion groupId="classworlds" artifactId="classworlds"/>
- </exclusions>
- </dependency>
- <dependency groupId="org.codehaus.plexus.redback" artifactId="redback-xwork-integration"
- version="${redback.version}"/>
- <dependency groupId="org.codehaus.plexus.registry" artifactId="plexus-registry-api" version="1.0-alpha-2"/>
- <dependency groupId="org.codehaus.plexus.registry" artifactId="plexus-registry-commons" version="1.0-alpha-2">
- <exclusions>
- <exclusion groupId="ant" artifactId="ant-optional"/>
- <exclusion groupId="jdom" artifactId="jdom"/>
- <exclusion groupId="commons-logging" artifactId="commons-logging-api"/>
- </exclusions>
- </dependency>
- <dependency groupId="org.codehaus.plexus.webdav" artifactId="plexus-webdav-simple" version="1.0-beta-2"/>
- <dependency groupId="org.codehaus.plexus.webdav" artifactId="plexus-webdav-api" version="1.0-beta-2"/>
- <dependency groupId="javax.servlet" artifactId="servlet-api" version="2.4"/>
- <dependency groupId="opensymphony" artifactId="sitemesh" version="2.2.1"/>
- <dependency groupId="org.slf4j" artifactId="slf4j-log4j12" version="1.2"/>
- <dependency groupId="taglibs" artifactId="standard" version="1.1.2"/>
- <dependency groupId="org.apache.maven.wagon" artifactId="wagon-file" version="${wagon.version}"/>
- <dependency groupId="org.apache.maven.wagon" artifactId="wagon-http" version="${wagon.version}"/>
- <dependency groupId="org.apache.maven.wagon" artifactId="wagon-provider-api" version="${wagon.version}"/>
- <dependency groupId="org.apache.maven.wagon" artifactId="wagon-ssh" version="${wagon.version}"/>
- <dependency groupId="com.opensymphony" artifactId="webwork" version="2.2.6"/>
- <dependency groupId="xmlunit" artifactId="xmlunit" version="1.0"/>
- </dependencies>
- </dependencyManagement>
- <reporting>
- <plugins>
- <plugin groupId="org.codehaus.mojo" artifactId="cobertura-maven-plugin" version="2.2"/>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-checkstyle-plugin" version="2.1">
- <configuration>
- <configLocation>config/maven_checks.xml</configLocation>
- </configuration>
- </plugin>
- <!-- TODO: OOME
- <plugin groupId="org.codehaus.mojo" artifactId="findbugs-maven-plugin" version="1.1.1">
- </plugin>
- -->
- <plugin groupId="org.codehaus.mojo" artifactId="changelog-maven-plugin"/>
- <plugin groupId="org.codehaus.mojo" artifactId="taglist-maven-plugin"/>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-jxr-plugin">
- <configuration>
- <aggregate>true</aggregate>
- </configuration>
- </plugin>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-surefire-report-plugin"/>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-javadoc-plugin">
- <configuration>
- <source>1.5</source>
- <aggregate>true</aggregate>
- <doclet>gr.spinellis.umlgraph.doclet.UmlGraphDoc</doclet>
- <docletArtifact>
- <groupId>gr.spinellis</groupId>
- <artifactId>UmlGraph</artifactId>
- <version>4.6</version>
- </docletArtifact>
- <additionalparam>
- -inferrel -inferdep -quiet -hide java.*
- -collpackages java.util.* -qualify
- -postfixpackage -nodefontsize 9
- -nodefontpackagesize 7
- </additionalparam>
- </configuration>
- </plugin>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-pmd-plugin">
- <!-- TODO: choose appropriate rulesets -->
- <configuration>
- <targetJdk>1.5</targetJdk>
- </configuration>
- </plugin>
- </plugins>
- </reporting>
- <profiles>
- <profile id="ci">
- <activation>
- <property name="enableCiProfile" value="true"/>
- </activation>
- <build>
- <plugins>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-pmd-plugin">
- <executions>
- <execution phase="process-sources">
- <goals>
- <!-- TODO: after rules are set
- <goal>check</goal>
- -->
- <goal>cpd-check</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-checkstyle-plugin">
- <executions>
- <execution phase="process-sources">
- <goals>
- <!-- TODO: reformat first, and correct the checks (some are not consistent with the Maven style)
- <goal>check</goal>
- -->
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin groupId="org.codehaus.mojo" artifactId="cobertura-maven-plugin" version="2.2">
- <configuration>
- <check>
- <!-- TODO: raise to 85/100 -->
- <totalLineRate>77</totalLineRate>
- <totalBranchRate>95</totalBranchRate>
- </check>
- <instrumentation>
- <excludes>
- <exclude>**/*$*</exclude>
- </excludes>
- </instrumentation>
- </configuration>
- <executions>
- <execution id="clean">
- <goals>
- <goal>clean</goal>
- </goals>
- </execution>
- <execution id="check">
- <goals>
- <goal>check</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- <profile id="release">
- <build>
- <plugins>
- <!-- TODO: Remove after maven-parent v6 is released -->
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-gpg-plugin" version="1.0-alpha-3"/>
- <plugin groupId="org.apache.maven.plugins" artifactId="maven-assembly-plugin" version="2.1" inherited="false">
- <configuration>
- <descriptors>
- <descriptor>src/main/assembly/src.xml</descriptor>
- </descriptors>
- <tarLongFileMode>gnu</tarLongFileMode>
- <appendAssemblyId>false</appendAssemblyId>
- <finalName>apache-archiva-${project.version}-src</finalName>
- </configuration>
- <executions>
- <execution id="make-assembly" phase="package">
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- </profile>
- </profiles>
- <properties>
- <maven.version>2.0.5</maven.version>
- <wagon.version>1.0-rc1-SNAPSHOT</wagon.version>
- <redback.version>1.0-alpha-4</redback.version>
- </properties>
- <distributionManagement>
- <site id="apache.website" url="${siteBaseDeployment}/ref/${project.version}"/>
- </distributionManagement>
-</project>
<version>1.2_Java1.3</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>easymock</groupId>
+ <artifactId>easymockclassextension</artifactId>
+ <version>1.2</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl104-over-slf4j</artifactId>
<artifactId>archiva-webdav</artifactId>
<version>1.2-SNAPSHOT</version>
</dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-xmlrpc-api</artifactId>
- <version>1.2-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-xmlrpc-services</artifactId>
- <version>1.2-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-xmlrpc-security</artifactId>
- <version>1.2-SNAPSHOT</version>
- </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-rss</artifactId>
<artifactId>mail</artifactId>
<version>1.4</version>
</dependency>
- <dependency>
- <groupId>org.apache.xmlrpc</groupId>
- <artifactId>xmlrpc-server</artifactId>
- <version>3.1</version>
- </dependency>
<dependency>
<groupId>org.apache.maven</groupId>
<artifactId>maven-artifact-manager</artifactId>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-rbac-memory</artifactId>
<version>${redback.version}</version>
<scope>test</scope>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-users-memory</artifactId>
<version>${redback.version}</version>
<scope>test</scope>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-keys-memory</artifactId>
<version>${redback.version}</version>
<scope>test</scope>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-rbac-model</artifactId>
<version>${redback.version}</version>
<exclusions>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-authorization-rbac</artifactId>
<version>${redback.version}</version>
<exclusions>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-rbac-role-manager</artifactId>
<version>${redback.version}</version>
<exclusions>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-system</artifactId>
<version>${redback.version}</version>
<exclusions>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-taglib</artifactId>
<version>${redback.version}</version>
<exclusions>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-xwork-content</artifactId>
<version>${redback.version}</version>
<type>war</type>
</exclusions>
</dependency>
<dependency>
- <groupId>org.codehaus.plexus.redback</groupId>
+ <groupId>org.codehaus.redback</groupId>
<artifactId>redback-xwork-integration</artifactId>
<version>${redback.version}</version>
<exclusions>
<version>${jetty.version}</version>
</dependency>
- <!-- xmlrpc -->
- <dependency>
- <groupId>com.atlassian.xmlrpc</groupId>
- <artifactId>atlassian-xmlrpc-binder-annotations</artifactId>
- <version>${binder.version}</version>
- </dependency>
- <dependency>
- <groupId>com.atlassian.xmlrpc</groupId>
- <artifactId>atlassian-xmlrpc-binder-server-spring</artifactId>
- <version>${binder.version}</version>
- <exclusions>
- <exclusion>
- <groupId>commons-logging</groupId>
- <artifactId>commons-logging</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>com.atlassian.xmlrpc</groupId>
- <artifactId>atlassian-xmlrpc-binder</artifactId>
- <version>${binder.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.atlassian.xmlrpc</groupId>
- <artifactId>atlassian-xmlrpc-binder-testing</artifactId>
- <version>${binder.version}</version>
- <scope>test</scope>
- </dependency>
-
<!-- Transitive versions to manage -->
<dependency>
<groupId>org.springframework</groupId>
<properties>
<maven.version>2.0.8</maven.version>
<wagon.version>1.0-beta-4</wagon.version>
- <redback.version>1.0.3</redback.version>
+ <redback.version>1.1.1</redback.version>
<jetty.version>6.1.6</jetty.version>
- <binder.version>0.8</binder.version>
</properties>
<profiles>
<profile>
<name>Codehaus Snapshots Repository</name>
<url>http://snapshots.repository.codehaus.org</url>
</repository>
- <repository>
- <releases>
- <enabled>false</enabled>
- </releases>
- <snapshots>
- <enabled>true</enabled>
- </snapshots>
- <id>com.atlassian.snapshots</id>
- <name>Atlassian Snapshots Repository</name>
- <url>https://maven.atlassian.com/public-snapshot</url>
- </repository>
</repositories>
</profile>
</profiles>