import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.metadata.repository.storage.StorageMetadataResolver;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
String projectVersion = VersionUtil.getBaseVersion( artifact.getVersion() );
// TODO: maybe not too efficient since it may have already been read and stored for this artifact
- ProjectVersionMetadata versionMetadata;
- try
- {
- versionMetadata =
- storageResolver.getProjectVersion( repository.getId(), artifact.getGroupId(), artifact.getArtifactId(),
- projectVersion );
- }
- catch ( MetadataResolverException e )
- {
- throw new ConsumerException( e.getMessage(), e );
- }
+ ProjectVersionMetadata versionMetadata =
+ storageResolver.getProjectVersion( repository.getId(), artifact.getGroupId(), artifact.getArtifactId(),
+ projectVersion );
if ( versionMetadata == null )
{
public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
- throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
}
public Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
- throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
log.info( "Finished first scan: " + stats.toDump( arepo ) );
- RepositoryStatistics repositoryStatistics = new RepositoryStatistics();
- repositoryStatistics.setScanStartTime( stats.getWhenGathered() );
- repositoryStatistics.setScanEndTime(
- new Date( stats.getWhenGathered().getTime() + stats.getDuration() ) );
- repositoryStatistics.setTotalFileCount( stats.getTotalFileCount() );
- repositoryStatistics.setNewFileCount( stats.getTotalFileCount() - previousFileCount );
// further statistics will be populated by the following method
- repositoryStatisticsManager.addStatisticsAfterScan( repoId, repositoryStatistics );
+ Date endTime = new Date( stats.getWhenGathered().getTime() + stats.getDuration() );
+ repositoryStatisticsManager.addStatisticsAfterScan( repoId, stats.getWhenGathered(), endTime,
+ stats.getTotalFileCount(),
+ stats.getTotalFileCount() - previousFileCount );
// log.info( "Scanning for removed repository content" );
repoTask.setScanAll( true );
Date date = Calendar.getInstance().getTime();
- RepositoryStatistics stats = new RepositoryStatistics();
- stats.setScanStartTime( new Date( date.getTime() - 1234567 ) );
- stats.setScanEndTime( date );
- stats.setNewFileCount( 8 );
- stats.setTotalArtifactCount( 8 );
- stats.setTotalFileCount( 8 );
- stats.setTotalGroupCount( 3 );
- stats.setTotalProjectCount( 5 );
- stats.setTotalArtifactFileSize( 999999 );
-
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, new Date( date.getTime() - 1234567 ), date, 8,
+ 8 );
taskExecutor.executeTask( repoTask );
stats.setTotalProjectCount( 5 );
stats.setTotalArtifactFileSize( 38545 );
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, new Date( date.getTime() - 1234567 ), date,
+ 31, 31 );
}
}
: null;
}
- public void addStatisticsAfterScan( String repositoryId, RepositoryStatistics repositoryStatistics )
+ public void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles,
+ long newFiles )
{
List<RepositoryStatistics> stats = getStatsList( repositoryId );
+
+ RepositoryStatistics repositoryStatistics = new RepositoryStatistics();
+ repositoryStatistics.setScanStartTime( startTime );
+ repositoryStatistics.setScanEndTime( endTime );
+ repositoryStatistics.setNewFileCount( newFiles );
+ repositoryStatistics.setTotalFileCount( totalFiles );
+
stats.add( repositoryStatistics );
}
import com.sun.syndication.feed.synd.SyndFeed;
import org.apache.archiva.metadata.model.ArtifactMetadata;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.rss.RssFeedEntry;
import org.apache.archiva.rss.RssFeedGenerator;
* Process all versions of the artifact which had a rss feed request.
*/
public SyndFeed process( Map<String, String> reqParams )
- throws MetadataResolverException
{
String groupId = reqParams.get( RssFeedProcessor.KEY_GROUP_ID );
String artifactId = reqParams.get( RssFeedProcessor.KEY_ARTIFACT_ID );
}
private SyndFeed processNewVersionsOfArtifact( String groupId, String artifactId )
- throws MetadataResolverException
{
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>();
for ( String repoId : metadataRepository.getRepositories() )
import java.util.Map;
import com.sun.syndication.feed.synd.SyndFeed;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
/**
* Retrieve and process the data that will be fed into the RssFeedGenerator.
public static final String KEY_ARTIFACT_ID = "artifactId";
- SyndFeed process( Map<String, String> reqParams )
- throws MetadataResolverException;
+ SyndFeed process( Map<String, String> reqParams );
}
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataResolver;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.metadata.repository.storage.maven2.MavenProjectFacet;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
}
public String browseArtifact()
- throws MetadataResolverException
{
if ( StringUtils.isEmpty( groupId ) )
{
}
private void populateSharedModel( Collection<String> selectedRepos, Collection<String> projectVersions )
- throws MetadataResolverException
{
sharedModel = new ProjectVersionMetadata();
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.MetadataResolver;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.commons.lang.StringUtils;
/**
// we don't want the implementation being that intelligent - so another resolver to do the
// "just-in-time" nature of picking up the metadata (if appropriate for the repository type) is used
- try
- {
- versionMetadata = metadataResolver.getProjectVersion( repoId, groupId, artifactId, version );
- }
- catch ( MetadataResolverException e )
- {
- addActionError( "Error occurred resolving metadata for project: " + e.getMessage() );
- return ERROR;
- }
+ versionMetadata = metadataResolver.getProjectVersion( repoId, groupId, artifactId, version );
if ( versionMetadata != null )
{
repositoryId = repoId;
{
if ( versionMetadata == null )
{
- try
- {
- versionMetadata = metadataResolver.getProjectVersion( repoId, groupId, artifactId, version );
- }
- catch ( MetadataResolverException e )
- {
- addActionError( "Error occurred resolving metadata for project: " + e.getMessage() );
- return ERROR;
- }
+ versionMetadata = metadataResolver.getProjectVersion( repoId, groupId, artifactId, version );
}
}
{
if ( versionMetadata == null )
{
- try
- {
- versionMetadata = metadataResolver.getProjectVersion( repoId, groupId, artifactId, version );
- }
- catch ( MetadataResolverException e )
- {
- addActionError( "Error occurred resolving metadata for project: " + e.getMessage() );
- return ERROR;
- }
+ versionMetadata = metadataResolver.getProjectVersion( repoId, groupId, artifactId, version );
}
}
{
if ( versionMetadata == null )
{
- try
- {
- versionMetadata = metadataResolver.getProjectVersion( repoId, groupId, artifactId, version );
- }
- catch ( MetadataResolverException e )
- {
- addActionError( "Error occurred resolving metadata for project: " + e.getMessage() );
- return ERROR;
- }
+ versionMetadata = metadataResolver.getProjectVersion( repoId, groupId, artifactId, version );
}
}
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.FeedException;
import com.sun.syndication.io.SyndFeedOutput;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.rss.processor.RssFeedProcessor;
import org.apache.commons.codec.Decoder;
import org.apache.commons.codec.DecoderException;
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, USER_NOT_AUTHORIZED );
}
- catch ( MetadataResolverException e )
- {
- log.debug( COULD_NOT_GENERATE_FEED_ERROR, e );
- res.sendError( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, COULD_NOT_GENERATE_FEED_ERROR );
- }
}
/**
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
public class TestMetadataRepository
implements MetadataRepository
public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
- throws MetadataResolverException
{
throw new UnsupportedOperationException();
}
}
public Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
- throws MetadataResolverException
{
return versions;
}
import com.opensymphony.xwork2.Action;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.metadata.repository.memory.TestMetadataResolver;
public class BrowseActionTest
}
public void testBrowseArtifactNoObservableRepos()
- throws MetadataResolverException
{
setObservableRepos( Collections.<String>emptyList() );
String selectedGroupId = "org.apache";
}
public void testBrowseArtifactNoGroupId()
- throws MetadataResolverException
{
String selectedArtifactId = "apache";
}
public void testBrowseArtifactNoArtifactId()
- throws MetadataResolverException
{
String selectedGroupId = "org.apache";
}
public void testBrowseArtifact()
- throws MetadataResolverException
{
String selectedGroupId = "org.apache";
String selectedArtifactId = "apache";
}
public void testBrowseArtifactWithSnapshots()
- throws MetadataResolverException
{
String selectedGroupId = "org.apache";
String selectedArtifactId = "apache";
public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
- throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
}
public Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
- throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
- throws MetadataResolverException
{
ProjectVersionMetadata metadata =
metadataRepository.getProjectVersion( repoId, namespace, projectId, projectVersion );
}
public Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
- throws MetadataResolverException
{
Collection<String> projectVersions = metadataRepository.getProjectVersions( repoId, namespace, projectId );
Collection<String> storageProjectVersions = storageResolver.getProjectVersions( repoId, namespace, projectId,
{
ProjectMetadata getProject( String repoId, String namespace, String projectId );
- ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId, String projectVersion )
- throws MetadataResolverException;
+ ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
+ String projectVersion );
Collection<String> getArtifactVersions( String repoId, String namespace, String projectId, String projectVersion );
Collection<String> getProjects( String repoId, String namespace );
- Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
- throws MetadataResolverException;
+ Collection<String> getProjectVersions( String repoId, String namespace, String projectId );
Collection<ArtifactMetadata> getArtifacts( String repoId, String namespace, String projectId,
- String projectVersion );
+ String projectVersion );
}
+++ /dev/null
-package org.apache.archiva.metadata.repository;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-public class MetadataResolverException
- extends Exception
-{
- public MetadataResolverException( String message, Exception cause )
- {
- super( message, cause );
- }
-}
import java.util.Set;
import org.apache.archiva.metadata.repository.MetadataResolver;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.metadata.repository.storage.maven2.RepositoryModelResolver;
import org.apache.commons.lang.StringUtils;
List remoteRepositories )
throws ArtifactMetadataRetrievalException
{
- try
+ Set<ArtifactVersion> versions = new HashSet<ArtifactVersion>();
+ for ( String repoId : repositoryIds )
{
- Set<ArtifactVersion> versions = new HashSet<ArtifactVersion>();
- for ( String repoId : repositoryIds )
+ Collection<String> projectVersions =
+ metadataResolver.getProjectVersions( repoId, artifact.getGroupId(), artifact.getArtifactId() );
+ for ( String version : projectVersions )
{
- Collection<String> projectVersions =
- metadataResolver.getProjectVersions( repoId, artifact.getGroupId(), artifact.getArtifactId() );
- for ( String version : projectVersions )
- {
- versions.add( new DefaultArtifactVersion( version ) );
- }
+ versions.add( new DefaultArtifactVersion( version ) );
}
-
- return new ArrayList<ArtifactVersion>( versions );
- }
- catch ( MetadataResolverException e )
- {
- throw new ArtifactMetadataRetrievalException( e.getMessage(), e, artifact );
}
+
+ return new ArrayList<ArtifactVersion>( versions );
}
}
}
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.metadata.repository.filter.AllFilter;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
- throws MetadataResolverException
{
ManagedRepositoryConfiguration repositoryConfiguration =
archivaConfiguration.getConfiguration().findManagedRepositoryById( repoId );
public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
- throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
}
public Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
- throws MetadataResolverException
{
return null; //To change body of implemented methods use File | Settings | File Templates.
}
import org.apache.archiva.metadata.model.MailingList;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
-import org.apache.archiva.metadata.repository.MetadataResolverException;
import org.apache.archiva.metadata.repository.filter.ExcludesFilter;
import org.apache.archiva.metadata.repository.storage.StorageMetadataResolver;
import org.apache.archiva.reports.RepositoryProblemFacet;
}
public void testGetProjectVersionMetadata()
- throws MetadataResolverException
{
ProjectVersionMetadata metadata =
resolver.getProjectVersion( TEST_REPO_ID, "org.apache.archiva", "archiva-common", "1.2.1" );
}
public void testGetProjectVersionMetadataForTimestampedSnapshot()
- throws MetadataResolverException
{
ProjectVersionMetadata metadata =
resolver.getProjectVersion( TEST_REPO_ID, "org.apache", "apache", "5-SNAPSHOT" );
}
public void testGetProjectVersionMetadataForTimestampedSnapshotMissingMetadata()
- throws MetadataResolverException
{
ProjectVersionMetadata metadata =
resolver.getProjectVersion( TEST_REPO_ID, "com.example.test", "missing-metadata", "1.0-SNAPSHOT" );
}
public void testGetProjectVersionMetadataForTimestampedSnapshotMalformedMetadata()
- throws MetadataResolverException
{
ProjectVersionMetadata metadata =
resolver.getProjectVersion( TEST_REPO_ID, "com.example.test", "malformed-metadata", "1.0-SNAPSHOT" );
}
public void testGetProjectVersionMetadataForTimestampedSnapshotIncompleteMetadata()
- throws MetadataResolverException
{
ProjectVersionMetadata metadata =
resolver.getProjectVersion( TEST_REPO_ID, "com.example.test", "incomplete-metadata", "1.0-SNAPSHOT" );
}
public void testGetProjectVersionMetadataForInvalidPom()
- throws MetadataResolverException
{
assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
}
public void testGetProjectVersionMetadataForMislocatedPom()
- throws MetadataResolverException
{
assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
}
public void testGetProjectVersionMetadataForMissingPom()
- throws MetadataResolverException
{
assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
import java.text.ParseException;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
+import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
}
}
- public void addStatisticsAfterScan( String repositoryId, RepositoryStatistics repositoryStatistics )
+ private void walkRepository( RepositoryStatistics stats, String repositoryId, String ns )
{
+ for ( String namespace : metadataRepository.getNamespaces( repositoryId, ns ) )
+ {
+ walkRepository( stats, repositoryId, ns + "." + namespace );
+ }
+
+ Collection<String> projects = metadataRepository.getProjects( repositoryId, ns );
+ if ( !projects.isEmpty() )
+ {
+ stats.setTotalGroupCount( stats.getTotalGroupCount() + 1 );
+ stats.setTotalProjectCount( stats.getTotalProjectCount() + projects.size() );
+
+ for ( String project : projects )
+ {
+ for ( String version : metadataRepository.getProjectVersions( repositoryId, ns, project ) )
+ {
+ for ( ArtifactMetadata artifact : metadataRepository.getArtifacts( repositoryId, ns, project,
+ version ) )
+ {
+ stats.setTotalArtifactCount( stats.getTotalArtifactCount() + 1 );
+ stats.setTotalArtifactFileSize( stats.getTotalArtifactFileSize() + artifact.getSize() );
+
+ // TODO: add by type
+ }
+ }
+ }
+ }
+ }
+
+
+ public void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles, long newFiles )
+ {
+ RepositoryStatistics repositoryStatistics = new RepositoryStatistics();
+ repositoryStatistics.setScanStartTime( startTime );
+ repositoryStatistics.setScanEndTime( endTime );
+ repositoryStatistics.setTotalFileCount( totalFiles );
+ repositoryStatistics.setNewFileCount( newFiles );
+
// In the future, instead of being tied to a scan we might want to record information in the fly based on
// events that are occurring. Even without these totals we could query much of the information on demand based
// on information from the metadata content repository. In the mean time, we lock information in at scan time.
// Note that if new types are later discoverable due to a code change or new plugin, historical stats will not
// be updated and the repository will need to be rescanned.
- // TODO, populate these and also a count per artifact type
- // populate total artifact count from content repository
-// repositoryStatistics.setTotalArtifactCount( );
- // populate total size from content repository
-// repositoryStatistics.setTotalArtifactFileSize( );
- // populate total group count from content repository
-// repositoryStatistics.setTotalGroupCount( );
- // populate total project count from content repository
-// repositoryStatistics.setTotalProjectCount( );
+ long startWalk = System.currentTimeMillis();
+ // TODO: we can probably get a more efficient implementation directly from the metadata repository, but for now
+ // we just walk it. Alternatively, we could build an index, or store the aggregate information and update
+ // it on the fly
+ for ( String ns : metadataRepository.getRootNamespaces( repositoryId ) )
+ {
+ walkRepository( repositoryStatistics, repositoryId, ns );
+ }
+ log.info( "Repository walk for statistics executed in " + ( System.currentTimeMillis() - startWalk ) + "ms" );
metadataRepository.addMetadataFacet( repositoryId, repositoryStatistics );
}
totalProjectCount = Long.valueOf( properties.get( "totalProjectCount" ) );
newFileCount = Long.valueOf( properties.get( "newFileCount" ) );
}
+
+ @Override
+ public String toString()
+ {
+ return "RepositoryStatistics{" + "scanEndTime=" + scanEndTime + ", scanStartTime=" + scanStartTime +
+ ", totalArtifactCount=" + totalArtifactCount + ", totalArtifactFileSize=" + totalArtifactFileSize +
+ ", totalFileCount=" + totalFileCount + ", totalGroupCount=" + totalGroupCount + ", totalProjectCount=" +
+ totalProjectCount + ", newFileCount=" + newFileCount + '}';
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( o == null || getClass() != o.getClass() )
+ {
+ return false;
+ }
+
+ RepositoryStatistics that = (RepositoryStatistics) o;
+
+ if ( newFileCount != that.newFileCount )
+ {
+ return false;
+ }
+ if ( totalArtifactCount != that.totalArtifactCount )
+ {
+ return false;
+ }
+ if ( totalArtifactFileSize != that.totalArtifactFileSize )
+ {
+ return false;
+ }
+ if ( totalFileCount != that.totalFileCount )
+ {
+ return false;
+ }
+ if ( totalGroupCount != that.totalGroupCount )
+ {
+ return false;
+ }
+ if ( totalProjectCount != that.totalProjectCount )
+ {
+ return false;
+ }
+ if ( !scanEndTime.equals( that.scanEndTime ) )
+ {
+ return false;
+ }
+ if ( !scanStartTime.equals( that.scanStartTime ) )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ int result = scanEndTime.hashCode();
+ result = 31 * result + scanStartTime.hashCode();
+ result = 31 * result + (int) ( totalArtifactCount ^ ( totalArtifactCount >>> 32 ) );
+ result = 31 * result + (int) ( totalArtifactFileSize ^ ( totalArtifactFileSize >>> 32 ) );
+ result = 31 * result + (int) ( totalFileCount ^ ( totalFileCount >>> 32 ) );
+ result = 31 * result + (int) ( totalGroupCount ^ ( totalGroupCount >>> 32 ) );
+ result = 31 * result + (int) ( totalProjectCount ^ ( totalProjectCount >>> 32 ) );
+ result = 31 * result + (int) ( newFileCount ^ ( newFileCount >>> 32 ) );
+ return result;
+ }
}
{
RepositoryStatistics getLastStatistics( String repositoryId );
- void addStatisticsAfterScan( String repositoryId, RepositoryStatistics repositoryStatistics );
+ void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles, long newFiles );
void deleteStatistics( String repositoryId );
import java.util.Map;
import junit.framework.TestCase;
+import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.easymock.MockControl;
RepositoryStatistics stats = createTestStats( startTime, current );
+ walkRepository( 1 );
+
metadataRepository.addMetadataFacet( TEST_REPO_ID, stats );
metadataRepositoryControl.expectAndReturn(
metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
metadataRepositoryControl.replay();
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime, current, 56345, 45 );
stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
assertNotNull( stats );
- assertEquals( 1400032000L, stats.getTotalArtifactFileSize() );
+ assertEquals( 246900, stats.getTotalArtifactFileSize() );
assertEquals( 45, stats.getNewFileCount() );
- assertEquals( 10412, stats.getTotalArtifactCount() );
- assertEquals( 2036, stats.getTotalProjectCount() );
- assertEquals( 531, stats.getTotalGroupCount() );
+ assertEquals( 20, stats.getTotalArtifactCount() );
+ assertEquals( 5, stats.getTotalProjectCount() );
+ assertEquals( 4, stats.getTotalGroupCount() );
assertEquals( 56345, stats.getTotalFileCount() );
assertEquals( current.getTime() - 12345, stats.getScanStartTime().getTime() );
assertEquals( current, stats.getScanEndTime() );
public void testDeleteStats()
{
+ walkRepository( 2 );
+
Date current = new Date();
Date startTime1 = new Date( current.getTime() - 12345 );
metadataRepositoryControl.replay();
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats1 );
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats2 );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime1, stats1.getScanEndTime(), 56345,
+ 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime2, stats2.getScanEndTime(), 56345,
+ 45 );
assertNotNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
public void testGetStatsRangeInside()
{
+ walkRepository( 3 );
+
Date current = new Date();
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
}
List<RepositoryStatistics> list =
public void testGetStatsRangeUpperOutside()
{
+ walkRepository( 3 );
+
Date current = new Date();
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
}
List<RepositoryStatistics> list =
public void testGetStatsRangeLowerOutside()
{
+ walkRepository( 3 );
+
Date current = new Date();
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
}
List<RepositoryStatistics> list =
public void testGetStatsRangeLowerAndUpperOutside()
{
+ walkRepository( 3 );
+
Date current = new Date();
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
}
List<RepositoryStatistics> list =
public void testGetStatsRangeNotInside()
{
+ walkRepository( 3 );
+
Date current = new Date();
addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
for ( RepositoryStatistics stats : statsCreated.values() )
{
- repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
}
List<RepositoryStatistics> list =
statsCreated.put( stats.getName(), stats );
}
+ private ArtifactMetadata createArtifact( String namespace, String projectId, String projectVersion, String type )
+ {
+ ArtifactMetadata metadata = new ArtifactMetadata();
+ metadata.setRepositoryId( TEST_REPO_ID );
+ metadata.setId( projectId + "-" + projectVersion + "." + type );
+ metadata.setProject( projectId );
+ metadata.setSize( 12345L );
+ metadata.setVersion( projectVersion );
+ metadata.setNamespace( namespace );
+ return metadata;
+ }
+
+ /**
+ * Expected statistics for a single walk of the mocked repository set up in
+ * walkRepository(): 20 artifacts of 12345 bytes each, across 5 projects in
+ * 4 groups. Scan times are supplied by the caller so date ranges can vary.
+ */
private RepositoryStatistics createTestStats( Date startTime, Date endTime )
{
RepositoryStatistics stats = new RepositoryStatistics();
stats.setScanStartTime( startTime );
stats.setScanEndTime( endTime );
- stats.setTotalArtifactFileSize( 1400032000L );
+ // 20 artifacts x 12345 bytes (the fixed size used by createArtifact)
+ stats.setTotalArtifactFileSize( 20 * 12345L );
stats.setNewFileCount( 45 );
- stats.setTotalArtifactCount( 10412 );
- stats.setTotalProjectCount( 2036 );
- stats.setTotalGroupCount( 531 );
+ // totals now match the mock walk in walkRepository() instead of arbitrary values
+ stats.setTotalArtifactCount( 20 );
+ stats.setTotalProjectCount( 5 );
+ stats.setTotalGroupCount( 4 );
stats.setTotalFileCount( 56345 );
return stats;
}
+
+ /**
+ * Programs the mock metadata repository with {@code count} identical full
+ * repository walks: 4 groups, 5 projects and 20 artifacts in total, matching
+ * the expected totals built by createTestStats().
+ *
+ * Fix: the artifact fixtures for "metadata-model" and "maven-model" were
+ * copy-pasted from "metadata-repository-api", so their namespace/project
+ * fields contradicted the coordinates being walked; they now match.
+ */
+ private void walkRepository( int count )
+ {
+ for ( int i = 0; i < count; i++ )
+ {
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getRootNamespaces( TEST_REPO_ID ),
+ Arrays.asList( "com", "org" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "com" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "com" ),
+ Arrays.asList( "example" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "com.example" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "com.example" ),
+ Arrays.asList( "example-project" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "com.example", "example-project" ),
+ Arrays.asList( "1.0", "1.1" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "com.example", "example-project", "1.0" ),
+ Arrays.asList( createArtifact( "com.example", "example-project", "1.0", "jar" ),
+ createArtifact( "com.example", "example-project", "1.0", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "com.example", "example-project", "1.1" ),
+ Arrays.asList( createArtifact( "com.example", "example-project", "1.1", "jar" ),
+ createArtifact( "com.example", "example-project", "1.1", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "org" ),
+ Arrays.asList( "apache", "codehaus" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "org.apache" ),
+ Arrays.asList( "archiva", "maven" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "org.apache" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getNamespaces( TEST_REPO_ID, "org.apache.archiva" ), Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjects( TEST_REPO_ID, "org.apache.archiva" ),
+ Arrays.asList( "metadata-repository-api", "metadata-model" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api" ),
+ Arrays.asList( "1.3-SNAPSHOT", "1.3" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api",
+ "1.3-SNAPSHOT" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3-SNAPSHOT", "jar" ),
+ createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3-SNAPSHOT",
+ "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api", "1.3" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3", "jar" ),
+ createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "org.apache.archiva", "metadata-model" ),
+ Arrays.asList( "1.3-SNAPSHOT", "1.3" ) );
+ // fixed: artifact fixtures must belong to metadata-model, not metadata-repository-api
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT", "jar" ),
+ createArtifact( "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.archiva", "metadata-model", "1.3" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "metadata-model", "1.3", "jar" ),
+ createArtifact( "org.apache.archiva", "metadata-model", "1.3", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getNamespaces( TEST_REPO_ID, "org.apache.maven" ), Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjects( TEST_REPO_ID, "org.apache.maven" ), Arrays.asList( "maven-model" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "org.apache.maven", "maven-model" ),
+ Arrays.asList( "2.2.1" ) );
+ // fixed: artifact fixtures must belong to org.apache.maven:maven-model
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.maven", "maven-model", "2.2.1" ),
+ Arrays.asList( createArtifact( "org.apache.maven", "maven-model", "2.2.1", "jar" ),
+ createArtifact( "org.apache.maven", "maven-model", "2.2.1", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "org.codehaus" ),
+ Arrays.asList( "plexus" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "org" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "org.codehaus" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getNamespaces( TEST_REPO_ID, "org.codehaus.plexus" ), Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjects( TEST_REPO_ID, "org.codehaus.plexus" ),
+ Arrays.asList( "plexus-spring" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring" ),
+ Arrays.asList( "1.0", "1.1", "1.2" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.0" ),
+ Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.0", "jar" ),
+ createArtifact( "org.codehaus.plexus", "plexus-spring", "1.0", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.1" ),
+ Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.1", "jar" ),
+ createArtifact( "org.codehaus.plexus", "plexus-spring", "1.1", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.2" ),
+ Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.2", "jar" ),
+ createArtifact( "org.codehaus.plexus", "plexus-spring", "1.2", "pom" ) ) );
+ }
+ }
}