<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-dependency-tree-consumer</artifactId>
- <version>1.3-SNAPSHOT</version>
+ <version>1.4-SNAPSHOT</version>
<scope>runtime</scope>
</dependency>
- <version>1.3-SNAPSHOT</version>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-metadata-consumer</artifactId>
+ <scope>runtime</scope>
+ </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-xml-tools</artifactId>
</transformers>
<artifactSet>
<excludes>
-- <exclude>xerces:xercesImpl</exclude>
++ <exclude>xerces:xercesImpl</exclude>
<exclude>xml-apis:xml-apis</exclude>
<exclude>xalan:xalan</exclude>
<exclude>commons-beanutils:commons-beanutils</exclude>
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>archiva-consumers</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>archiva-metadata-consumer</artifactId>
+ <name>Archiva Metadata Consumer</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-consumer-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-model</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-repository-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-repository-layer</artifactId>
+ </dependency>
+ </dependencies>
+</project>
* under the License.
*/
++import java.text.ParseException;
++import java.text.SimpleDateFormat;
++import java.util.Calendar;
++import java.util.Date;
++import java.util.HashMap;
++import java.util.Map;
++
++import org.apache.archiva.metadata.model.MetadataFacet;
++
/**
* AuditEvent
-- *
++ *
* @version $Id$
*/
public class AuditEvent
++ implements MetadataFacet
{
++ public static final String TIMESTAMP_FORMAT = "yyyy/MM/dd/HHmmss.SSS";
++
public static final String CREATE_DIR = "Created Directory";
public static final String CREATE_FILE = "Created File";
public static final String DISABLE_REPO_CONSUMER = "Disabled Content Consumer";
-- public static final String ENABLE_DB_CONSUMER = "Enabled Database Consumer";
--
-- public static final String DISABLE_DB_CONSUMER = "Disabled Database Consumer";
--
public static final String ADD_PATTERN = "Added File Type Pattern";
public static final String REMOVE_PATTERN = "Removed File Type Pattern";
-- public static final String DB_SCHEDULE = "Modified Scanning Schedule";
--
private String repositoryId;
private String userId;
private String remoteIP;
++ // TODO: change to artifact reference? does it ever refer to just a path?
++
private String resource;
private String action;
++ private Date timestamp;
++
++ public static final String FACET_ID = "org.apache.archiva.audit";
++
public AuditEvent()
{
/* do nothing */
}
++ public AuditEvent( String name, String repositoryId )
++ {
++ try
++ {
++ timestamp = new SimpleDateFormat( TIMESTAMP_FORMAT ).parse( name );
++ }
++ catch ( ParseException e )
++ {
++            throw new IllegalArgumentException( "Improperly formatted timestamp for audit log event: " + name, e );
++ }
++ this.repositoryId = repositoryId;
++ }
++
public AuditEvent( String repoId, String user, String resource, String action )
{
this.repositoryId = repoId;
this.userId = user;
this.resource = resource;
this.action = action;
-- }
--
-- public AuditEvent( String user, String resource, String action )
-- {
-- this( null, user, resource, action );
-- }
--
-- public AuditEvent( String principal, String action2 )
-- {
-- this( null, principal, action2 );
++ this.timestamp = Calendar.getInstance().getTime();
}
public String getRepositoryId()
{
this.remoteIP = remoteIP;
}
++
++ public Date getTimestamp()
++ {
++ return timestamp;
++ }
++
++ public void setTimestamp( Date timestamp )
++ {
++ this.timestamp = timestamp;
++ }
++
++ public String getFacetId()
++ {
++ return FACET_ID;
++ }
++
++ public String getName()
++ {
++ return new SimpleDateFormat( TIMESTAMP_FORMAT ).format( timestamp );
++ }
++
++ public Map<String, String> toProperties()
++ {
++ Map<String, String> properties = new HashMap<String, String>();
++ properties.put( "action", this.action );
++ if ( this.userId != null )
++ {
++ properties.put( "user", this.userId );
++ }
++ if ( this.remoteIP != null )
++ {
++ properties.put( "remoteIP", this.remoteIP );
++ }
++ if ( this.resource != null )
++ {
++ properties.put( "resource", this.resource );
++ }
++ return properties;
++ }
++
++ public void fromProperties( Map<String, String> properties )
++ {
++ this.action = properties.get( "action" );
++ this.remoteIP = properties.get( "remoteIP" );
++ this.userId = properties.get( "user" );
++ this.resource = properties.get( "resource" );
++ }
++
++ @Override
++ public boolean equals( Object o )
++ {
++ if ( this == o )
++ {
++ return true;
++ }
++ if ( o == null || getClass() != o.getClass() )
++ {
++ return false;
++ }
++
++ AuditEvent that = (AuditEvent) o;
++
++ if ( !action.equals( that.action ) )
++ {
++ return false;
++ }
++ if ( remoteIP != null ? !remoteIP.equals( that.remoteIP ) : that.remoteIP != null )
++ {
++ return false;
++ }
++ if ( repositoryId != null ? !repositoryId.equals( that.repositoryId ) : that.repositoryId != null )
++ {
++ return false;
++ }
++ if ( resource != null ? !resource.equals( that.resource ) : that.resource != null )
++ {
++ return false;
++ }
++ if ( !timestamp.equals( that.timestamp ) )
++ {
++ return false;
++ }
++ if ( userId != null ? !userId.equals( that.userId ) : that.userId != null )
++ {
++ return false;
++ }
++
++ return true;
++ }
++
++ @Override
++ public int hashCode()
++ {
++ int result = repositoryId != null ? repositoryId.hashCode() : 0;
++ result = 31 * result + ( userId != null ? userId.hashCode() : 0 );
++ result = 31 * result + ( remoteIP != null ? remoteIP.hashCode() : 0 );
++ result = 31 * result + ( resource != null ? resource.hashCode() : 0 );
++ result = 31 * result + action.hashCode();
++ result = 31 * result + timestamp.hashCode();
++ return result;
++ }
++
++ @Override
++ public String toString()
++ {
++ return "AuditEvent{" + "repositoryId='" + repositoryId + '\'' + ", userId='" + userId + '\'' + ", remoteIP='" +
++ remoteIP + '\'' + ", resource='" + resource + '\'' + ", action='" + action + '\'' + ", timestamp=" +
++ timestamp + '}';
++ }
}
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>archiva-base</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>archiva-repository-scanner</artifactId>
+ <name>Archiva Repository Scanner</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-consumer-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-configuration</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-context</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-spring</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>archiva-scheduler</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>archiva-scheduler-api</artifactId>
+ <name>Archiva Base :: Scheduled Tasks :: API</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-taskqueue</artifactId>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>archiva-scheduler</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>archiva-scheduler-indexing</artifactId>
+ <name>Archiva Base :: Scheduled Tasks :: Indexing</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-scheduler-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-configuration</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.sonatype.nexus</groupId>
+ <artifactId>nexus-indexer</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-spring</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-component-metadata</artifactId>
+ <executions>
+ <execution>
+ <id>merge</id>
+ <goals>
+ <goal>merge-metadata</goal>
+ </goals>
+ <configuration>
+ <descriptors>
+ <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+ <descriptor>${project.build.outputDirectory}/META-INF/plexus/components.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>archiva-scheduler</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>archiva-scheduler-repository</artifactId>
+ <name>Archiva Base :: Scheduled Tasks :: Repository Scanning</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-scheduler-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>repository-statistics</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-configuration</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-repository-scanner</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-repository-layer</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-quartz</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-spring</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-component-metadata</artifactId>
+ <executions>
+ <execution>
+ <id>merge</id>
+ <phase>process-resources</phase>
+ <goals>
+ <goal>merge-metadata</goal>
+ </goals>
+ <configuration>
+ <descriptors>
+ <descriptor>${basedir}/src/main/components-fragment.xml</descriptor>
+ <descriptor>${project.build.outputDirectory}/META-INF/plexus/components.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-modules</artifactId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>archiva-scheduler</artifactId>
+ <packaging>pom</packaging>
+ <name>Archiva Base :: Scheduled Tasks</name>
+ <modules>
+ <module>archiva-scheduler-api</module>
+ <module>archiva-scheduler-indexing</module>
+ <module>archiva-scheduler-repository</module>
+ </modules>
+</project>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
- <artifactId>archiva-report-manager</artifactId>
+ <artifactId>problem-reports</artifactId>
</dependency>
- <artifactId>archiva-artifact-reports</artifactId>
- <scope>runtime</scope>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
++ <artifactId>audit</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
- <artifactId>archiva-scheduled</artifactId>
+ <artifactId>archiva-scheduler-repository</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
- <artifactId>archiva-database-consumers</artifactId>
+ <artifactId>archiva-metadata-consumer</artifactId>
- <version>1.3-SNAPSHOT</version>
+ <scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>atlassian-xmlrpc-binder-server-spring</artifactId>
<scope>runtime</scope>
</dependency>
- <version>1.3-SNAPSHOT</version>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-model</artifactId>
- <version>1.3-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-repository-api</artifactId>
- <version>1.3-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-repository-file</artifactId>
- <version>1.3-SNAPSHOT</version>
+ <scope>runtime</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId> <!-- FIXME: temporary coupling to plugin, should be runtime -->
+ <artifactId>maven2-repository</artifactId>
+ </dependency>
</dependencies>
<build>
<resources>
\r
protected void triggerAuditEvent( String resource, String action )\r
{\r
-- AuditEvent event = new AuditEvent( getPrincipal(), resource, action );\r
++ AuditEvent event = new AuditEvent( null, getPrincipal(), resource, action );\r
event.setRemoteIP( getRemoteAddr() );\r
\r
for ( AuditListener listener : auditListeners )\r
\r
protected void triggerAuditEvent( String action )\r
{\r
-- AuditEvent event = new AuditEvent( getPrincipal(), action );\r
++ AuditEvent event = new AuditEvent( null, getPrincipal(), null, action );\r
event.setRemoteIP( getRemoteAddr() );\r
\r
for ( AuditListener listener : auditListeners )\r
private RepositoryContentFactory repositoryFactory;
/**
- * @plexus.requirement
+ * @plexus.requirement role="org.apache.archiva.scheduler.ArchivaTaskScheduler" role-hint="repository"
*/
private ArchivaTaskScheduler scheduler;
-
+
private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[]{ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5};
- private ProjectModelWriter pomWriter = new ProjectModel400Writer();
-
public void setArtifact( File file )
{
this.artifactFile = file;
--- /dev/null
-import org.apache.maven.archiva.database.ArchivaAuditLogsDao;
-import org.apache.maven.archiva.database.ArchivaDAO;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.ObjectNotFoundException;
-import org.apache.maven.archiva.database.SimpleConstraint;
-import org.apache.maven.archiva.database.constraints.ArchivaAuditLogsConstraint;
-import org.apache.maven.archiva.database.constraints.MostRecentArchivaAuditLogsConstraint;
-import org.apache.maven.archiva.model.ArchivaAuditLogs;
+ package org.apache.maven.archiva.web.action.reports;
+
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ import java.util.ArrayList;
+ import java.util.Calendar;
+ import java.util.Collections;
+ import java.util.Date;
+ import java.util.List;
+
+ import javax.servlet.http.HttpServletRequest;
+
++import org.apache.archiva.audit.AuditManager;
+ import org.apache.commons.lang.StringUtils;
+ import org.apache.commons.lang.time.DateUtils;
- /**
- * @plexus.requirement role-hint="jdo"
- */
- private ArchivaAuditLogsDao auditLogsDao;
-
- /**
- * @plexus.requirement role-hint="jdo"
- */
- private ArchivaDAO dao;
-
+ import org.apache.maven.archiva.repository.audit.AuditEvent;
+ import org.apache.maven.archiva.security.AccessDeniedException;
+ import org.apache.maven.archiva.security.ArchivaSecurityException;
+ import org.apache.maven.archiva.security.PrincipalNotFoundException;
+ import org.apache.maven.archiva.security.UserRepositories;
+ import org.apache.maven.archiva.web.action.PlexusActionSupport;
+ import org.apache.struts2.interceptor.ServletRequestAware;
+ import org.codehaus.redback.integration.interceptor.SecureAction;
+ import org.codehaus.redback.integration.interceptor.SecureActionBundle;
+ import org.codehaus.redback.integration.interceptor.SecureActionException;
+
+ import com.opensymphony.xwork2.Preparable;
+
+ /**
+ * @plexus.component role="com.opensymphony.xwork2.Action" role-hint="viewAuditLogReport"
+ * instantiation-strategy="per-lookup"
+ */
+ public class ViewAuditLogReportAction
+ extends PlexusActionSupport
+ implements SecureAction, ServletRequestAware, Preparable
+ {
+ protected HttpServletRequest request;
+
+ /**
+ * @plexus.requirement
+ */
+ private UserRepositories userRepositories;
+
- private List<ArchivaAuditLogs> auditLogs;
+ private String repository;
+
+ private List<String> repositories;
+
+ private String groupId;
+
+ private String artifactId;
+
+ private String startDate;
+
+ private String endDate;
+
+ private int rowCount = 30;
+
+ private int page = 1;
+
+ private String prev;
+
+ private String next;
+
+ protected boolean isLastPage = true;
+
- SimpleConstraint constraint = new MostRecentArchivaAuditLogsConstraint();
- auditLogs = (List<ArchivaAuditLogs>) dao.query( constraint );
++ private List<AuditEvent> auditLogs;
+
+ private static final String ALL_REPOSITORIES = "all";
+
+ protected int[] range = new int[2];
+
+ private String initial = "true";
+
+ private String headerName;
+
+ private static final String HEADER_LATEST_EVENTS = "Latest Events";
+
+ private static final String HEADER_RESULTS = "Results";
+
+ private String[] datePatterns = new String[] { "MM/dd/yy", "MM/dd/yyyy", "MMMMM/dd/yyyy", "MMMMM/dd/yy",
+ "dd MMMMM yyyy", "dd/MM/yy", "dd/MM/yyyy", "yyyy/MM/dd", "yyyy-MM-dd", "yyyy-dd-MM", "MM-dd-yyyy",
+ "MM-dd-yy" };
+
++ /**
++ * @plexus.requirement
++ */
++ private AuditManager auditManager;
++
+ public SecureActionBundle getSecureActionBundle()
+ throws SecureActionException
+ {
+ return null;
+ }
+
+ public void setServletRequest( HttpServletRequest request )
+ {
+ this.request = request;
+ }
+
+ @SuppressWarnings( "unchecked" )
+ public void prepare()
+ throws Exception
+ {
+ repositories = new ArrayList<String>();
+ repositories.add( ALL_REPOSITORIES );
+ repositories.addAll( getObservableRepositories() );
+
+ auditLogs = null;
+ groupId = "";
+ artifactId = "";
+ repository = "";
+
+ if( Boolean.parseBoolean( initial ) )
+ {
+ headerName = HEADER_LATEST_EVENTS;
+ }
+ else
+ {
+ headerName = HEADER_RESULTS;
+ }
+
- auditLogs = null;
- String artifact = "";
-
- if ( groupId != null && !"".equals( groupId.trim() ) )
- {
- artifact = groupId + ( ( artifactId != null && !"".equals( artifactId.trim() ) ) ? ( "/" + artifactId + "/%" ) : "%" );
- }
- else
- {
- artifact = ( artifactId != null && !"".equals( artifactId.trim() ) ) ? ( "%" + artifactId + "%" ) : "";
- }
-
- Date startDateInDF = null;
- Date endDateInDF = null;
++ auditLogs = auditManager.getMostRecentAuditEvents();
+ }
+
+ public String execute()
+ throws Exception
+ {
-
- ArchivaAuditLogsConstraint constraint = null;
- if ( !repository.equals( ALL_REPOSITORIES ) )
- {
- constraint =
- new ArchivaAuditLogsConstraint( range, artifact, repository, AuditEvent.UPLOAD_FILE, startDateInDF, endDateInDF );
- }
- else
- {
- constraint =
- new ArchivaAuditLogsConstraint( range, artifact, null, AuditEvent.UPLOAD_FILE, startDateInDF, endDateInDF );
- }
++ Date startDateInDF;
++ Date endDateInDF;
+ if ( startDate == null || "".equals( startDate ) )
+ {
+ Calendar cal = Calendar.getInstance();
+ cal.set( Calendar.HOUR, 0 );
+ cal.set( Calendar.MINUTE, 0 );
+ cal.set( Calendar.SECOND, 0 );
+
+ startDateInDF = cal.getTime();
+ }
+ else
+ {
+ startDateInDF = DateUtils.parseDate( startDate, datePatterns );
+ }
+
+ if ( endDate == null || "".equals( endDate ) )
+ {
+ endDateInDF = Calendar.getInstance().getTime();
+ }
+ else
+ {
+ endDateInDF = DateUtils.parseDate( endDate, datePatterns );
+ Calendar cal = Calendar.getInstance();
+ cal.setTime( endDateInDF );
+ cal.set( Calendar.HOUR, 23 );
+ cal.set( Calendar.MINUTE, 59 );
+ cal.set( Calendar.SECOND, 59 );
+
+ endDateInDF = cal.getTime();
+ }
+
+ range[0] = ( page - 1 ) * rowCount;
+ range[1] = ( page * rowCount ) + 1;
- try
- {
- auditLogs = auditLogsDao.queryAuditLogs( constraint );
- if( auditLogs.isEmpty() )
- {
- addActionError( "No audit logs found." );
- initial = "true";
- }
- else
- {
- initial = "false";
- }
-
- headerName = HEADER_RESULTS;
- paginate();
- }
- catch ( ObjectNotFoundException e )
+
- return ERROR;
++ String repo = repository.equals( ALL_REPOSITORIES ) ? null : repository;
++ // TODO: query by artifact
++ auditLogs = auditManager.getAuditEventsInRange( repo, startDateInDF, endDateInDF );
++
++ if( auditLogs.isEmpty() )
+ {
+ addActionError( "No audit logs found." );
- catch ( ArchivaDatabaseException e )
++ initial = "true";
+ }
- addActionError( "Error occurred while querying audit logs." );
- return ERROR;
++ else
+ {
- public List<ArchivaAuditLogs> getAuditLogs()
++ initial = "false";
+ }
+
++ headerName = HEADER_RESULTS;
++ paginate();
++
+ return SUCCESS;
+ }
+
+ private void paginate()
+ {
+ if ( auditLogs.size() <= rowCount )
+ {
+ isLastPage = true;
+ }
+ else
+ {
+ isLastPage = false;
+ auditLogs.remove( rowCount );
+ }
+
+ prev =
+ request.getRequestURL() + "?page=" + ( page - 1 ) + "&rowCount=" + rowCount + "&groupId=" + groupId +
+ "&artifactId=" + artifactId + "&repository=" + repository + "&startDate=" + startDate + "&endDate=" +
+ endDate;
+
+ next =
+ request.getRequestURL() + "?page=" + ( page + 1 ) + "&rowCount=" + rowCount + "&groupId=" + groupId +
+ "&artifactId=" + artifactId + "&repository=" + repository + "&startDate=" + startDate + "&endDate=" +
+ endDate;
+
+ prev = StringUtils.replace( prev, " ", "%20" );
+ next = StringUtils.replace( next, " ", "%20" );
+ }
+
+ private List<String> getObservableRepositories()
+ {
+ try
+ {
+ return userRepositories.getObservableRepositoryIds( getPrincipal() );
+ }
+ catch ( PrincipalNotFoundException e )
+ {
+ log.warn( e.getMessage(), e );
+ }
+ catch ( AccessDeniedException e )
+ {
+ log.warn( e.getMessage(), e );
+ }
+ catch ( ArchivaSecurityException e )
+ {
+ log.warn( e.getMessage(), e );
+ }
+ return Collections.emptyList();
+ }
+
+ public String getRepository()
+ {
+ return repository;
+ }
+
+ public void setRepository( String repository )
+ {
+ this.repository = repository;
+ }
+
+ public List<String> getRepositories()
+ {
+ return repositories;
+ }
+
+ public void setRepositories( List<String> repositories )
+ {
+ this.repositories = repositories;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public String getArtifactId()
+ {
+ return artifactId;
+ }
+
+ public void setArtifactId( String artifactId )
+ {
+ this.artifactId = artifactId;
+ }
+
- public void setAuditLogs( List<ArchivaAuditLogs> auditLogs )
- {
- this.auditLogs = auditLogs;
- }
-
++ public List<AuditEvent> getAuditLogs()
+ {
+ return auditLogs;
+ }
+
+ public int getRowCount()
+ {
+ return rowCount;
+ }
+
+ public void setRowCount( int rowCount )
+ {
+ this.rowCount = rowCount;
+ }
+
+ public String getStartDate()
+ {
+ return startDate;
+ }
+
+ public void setStartDate( String startDate )
+ {
+ this.startDate = startDate;
+ }
+
+ public String getEndDate()
+ {
+ return endDate;
+ }
+
+ public void setEndDate( String endDate )
+ {
+ this.endDate = endDate;
+ }
+
+ public int getPage()
+ {
+ return page;
+ }
+
+ public void setPage( int page )
+ {
+ this.page = page;
+ }
+
+ public boolean getIsLastPage()
+ {
+ return isLastPage;
+ }
+
+ public void setIsLastPage( boolean isLastPage )
+ {
+ this.isLastPage = isLastPage;
+ }
+
+ public String getPrev()
+ {
+ return prev;
+ }
+
+ public void setPrev( String prev )
+ {
+ this.prev = prev;
+ }
+
+ public String getNext()
+ {
+ return next;
+ }
+
+ public void setNext( String next )
+ {
+ this.next = next;
+ }
+
+ public String getInitial()
+ {
+ return initial;
+ }
+
+ public void setInitial( String initial )
+ {
+ this.initial = initial;
+ }
+
+ public String getHeaderName()
+ {
+ return headerName;
+ }
+
+ public void setHeaderName( String headerName )
+ {
+ this.headerName = headerName;
+ }
+ }
--- /dev/null
- <link rel="stylesheet" href="<c:url value='/css/ui.datepicker.css'/>" type="text/css" media="all"/>
- <script type="text/javascript" src="<c:url value='/js/jquery/jquery-1.2.6.pack.js'/>"></script>
- <script type="text/javascript" src="<c:url value='/js/jquery/ui.datepicker.packed.js'/>"></script>
+ <%--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ --%>
+
+ <%@ taglib prefix="s" uri="/struts-tags" %>
+ <%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %>
+ <%@ taglib uri="http://www.extremecomponents.org" prefix="ec" %>
+
+ <html>
+ <head>
+
+ <title>Audit Log Report</title>
+ <s:head theme="ajax" />
+
- <th>Artifact</th>
++ <link rel="stylesheet" href="<c:url value='/css/no-theme/jquery-ui-1.7.2.custom.css'/>" type="text/css" />
++ <script type="text/javascript" src="<c:url value='/js/jquery-1.3.2.min.js'/>"></script>
++ <script type="text/javascript" src="<c:url value='/js/jquery-ui-1.7.2.custom.min.js'/>"></script>
+ <script type="text/javascript" charset="utf-8">
+ $(document).ready(function()
+ {
+ $('#startDate').datepicker()
+ $('#endDate').datepicker()
+ });
+ </script>
+ </head>
+
+ <body>
+ <h1>Audit Log Report</h1>
+
+ <div id="contentArea">
+
+ <s:form action="viewAuditLogReport" namespace="/report" validate="false">
+
+ <s:hidden name="initial"/>
+
+ <div id="auditLogReport">
+ <table id="auditLogFieds">
+ <tbody>
+ <tr>
+ <td>Repository: </td>
+ <td><s:select name="repository" list="repositories" theme="simple"/></td>
+             </tr>
+ <tr>
+ <td>Group ID: </td>
+ <td><s:textfield id="groupId" name="groupId" theme="simple"/></td>
+             </tr>
+ <tr>
+ <td>Artifact ID: </td>
+ <td><s:textfield id="artifactId" name="artifactId" theme="simple"/></td>
+ </tr>
+ <tr>
+ <td>Start Date: </td>
+ <td><s:textfield id="startDate" name="startDate" theme="simple"/>
+ <%--
+ <script type="text/javascript">
+ Calendar.setup({
+ inputField : "startDate",
+ ifFormat : "%Y-%m-%d",
+ align : "Tl",
+ singleClick : true
+ });
+ </script>
+ --%>
+ </td>
+ </tr>
+ <tr>
+ <td>End Date: </td>
+ <td><s:textfield id="endDate" name="endDate" theme="simple"/>
+ <%--
+ <script type="text/javascript">
+ Calendar.setup({
+ inputField : "endDate",
+ ifFormat : "%Y-%m-%d",
+ align : "Tl",
+ singleClick : true
+ });
+ </script>
+ --%>
+ </td>
+ </tr>
+ <tr>
+ <td>Row Count: </td>
+ <td><s:textfield name="rowCount" theme="simple"/></td>
+ </tr>
+ <tr>
+ <td/>
+ <td style="text-align: right"><s:submit value="View Audit Log" theme="simple"/></td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+
+ <p/>
+
+ <div class="auditLogReportResults">
+
+ <h2>${headerName}</h2>
+ <p>
+ <s:actionerror/>
+ </p>
+
+ <c:if test="${not empty (auditLogs)}">
+ <table class="auditlogs" cellspacing="0">
+ <tr>
+ <th>Event</th>
+ <th>Repository</th>
- <td>${auditLog.event}</td>
++ <th>Resource</th>
+ <th>Event Date</th>
+ <th>Username</th>
+ </tr>
+
+ <c:forEach items="${auditLogs}" var="auditLog" varStatus="i">
+ <tr>
- <td>${auditLog.artifact}</td>
- <td>${auditLog.eventDate}</td>
- <td>${auditLog.username}</td>
++ <td>${auditLog.action}</td>
+ <td>${auditLog.repositoryId}</td>
++ <td>${auditLog.resource}</td>
++ <td>${auditLog.timestamp}</td>
++ <td>${auditLog.userId}</td>
+ </tr>
+ </c:forEach>
+ </table>
+
+ <s:set name="page" value="page"/>
+ <c:if test="${page > 1}"><a href="<s:property value='prev' />"><<</a></c:if>
+ <strong>Page: </strong>${page}
+ <s:set name="isLastPage" value="isLastPage"/>
+ <c:if test="${!isLastPage}"><a href="<s:property value='next' />">>></a></c:if>
+ </c:if>
+ </div>
+
+ </s:form>
+
+
+ </div>
+
+ </body>
+ </html>
<th>Archetypes</th>
<th>Jars</th>
<th>Wars</th>
- <th>Deployments</th>
- <th>Downloads</th>
+ <th>Ears</th>
+ <th>Exes</th>
+ <th>Dlls</th>
+ <th>Zips</th>
</tr>
- <c:forEach var="stats" items="${repositoryStatistics}">
+ <c:forEach var="stats" items="${repositoryStatistics}" varStatus="i">
<tr>
- <td>${stats.repositoryId}</td>
- <td align="right">${stats.fileCount}</td>
- <td align="right">${stats.totalSize}</td>
- <td align="right">${stats.artifactCount}</td>
- <td align="right">${stats.groupCount}</td>
- <td align="right">${stats.projectCount}</td>
- <td align="right">${stats.pluginCount}</td>
- <td align="right">${stats.archetypeCount}</td>
- <td align="right">${stats.jarCount}</td>
- <td align="right">${stats.warCount}</td>
- <td align="right">${stats.earCount}</td>
- <td align="right">${stats.exeCount}</td>
- <td align="right">${stats.dllCount}</td>
- <td align="right">${stats.zipCount}</td>
- <td align="right">${stats.deploymentCount}</td>
- <td align="right">${stats.downloadCount}</td>
- </tr>
+ <td>${selectedRepositories[i.count-1]}</td>
+ <td align="right">${stats.totalFileCount}</td>
+ <td align="right">${stats.totalArtifactFileSize}</td>
+ <td align="right">${stats.totalArtifactCount}</td>
+ <td align="right">${stats.totalGroupCount}</td>
+ <td align="right">${stats.totalProjectCount}</td>
+ <td align="right">${stats.totalCountForType['maven-plugin']}</td>
+ <td align="right">${stats.totalCountForType['maven-archetype']}</td>
+ <td align="right">${stats.totalCountForType['jar']}</td>
+ <td align="right">${stats.totalCountForType['war']}</td>
++ <td align="right">${stats.totalCountForType['ear']}</td>
++ <td align="right">${stats.totalCountForType['exe']}</td>
++ <td align="right">${stats.totalCountForType['dll']}</td>
++ <td align="right">${stats.totalCountForType['zip']}</td>
+ </tr>
</c:forEach>
</table>
</c:when>
<th>Archetypes</th>
<th>Jars</th>
<th>Wars</th>
- <th>Deployments</th>
- <th>Downloads</th>
- </tr>
+ <th>Ears</th>
+ <th>Exes</th>
+ <th>Dlls</th>
+ <th>Zips</th>
+ </tr>
<c:forEach var="stats" items="${repositoryStatistics}">
<tr>
- <td align="right">${stats.dateOfScan}</td>
- <td align="right">${stats.fileCount}</td>
- <td align="right">${stats.totalSize}</td>
- <td align="right">${stats.artifactCount}</td>
- <td align="right">${stats.groupCount}</td>
- <td align="right">${stats.projectCount}</td>
- <td align="right">${stats.pluginCount}</td>
- <td align="right">${stats.archetypeCount}</td>
- <td align="right">${stats.jarCount}</td>
- <td align="right">${stats.warCount}</td>
- <td align="right">${stats.earCount}</td>
- <td align="right">${stats.exeCount}</td>
- <td align="right">${stats.dllCount}</td>
- <td align="right">${stats.zipCount}</td>
- <td align="right">${stats.deploymentCount}</td>
- <td align="right">${stats.downloadCount}</td>
- </tr>
+ <td align="right">${stats.scanStartTime}</td>
+ <td align="right">${stats.totalFileCount}</td>
+ <td align="right">${stats.totalArtifactFileSize}</td>
+ <td align="right">${stats.totalArtifactCount}</td>
+ <td align="right">${stats.totalGroupCount}</td>
+ <td align="right">${stats.totalProjectCount}</td>
+ <td align="right">${stats.totalCountForType['maven-plugin']}</td>
+ <td align="right">${stats.totalCountForType['maven-archetype']}</td>
+ <td align="right">${stats.totalCountForType['jar']}</td>
+ <td align="right">${stats.totalCountForType['war']}</td>
++ <td align="right">${stats.totalCountForType['ear']}</td>
++ <td align="right">${stats.totalCountForType['exe']}</td>
++ <td align="right">${stats.totalCountForType['dll']}</td>
++ <td align="right">${stats.totalCountForType['zip']}</td>
+ </tr>
</c:forEach>
</table>
color: gray;
}
-
-
-
-
+#messages {
+ background-color: yellow;
+ border: 1px solid orange;
+ margin-top: 2em;
+}
+
+#messages ul {
+ list-style-image: url(../images/icon_warning_sml.gif)
+}
+
+ table.auditlogs {
+ text-align: center;
+ font-family: Verdana, Geneva, Arial, Helvetica, sans-serif ;
+ font-weight: normal;
+ font-size: 11px;
+ color: #fff;
+ width: 100%;
+ background-color: #666;
+ border: 0px;
+ border-collapse: collapse;
+ border-spacing: 0px;
+ }
+
+ table.auditlogs th {
+ background-color: #666;
+ color: #fff;
+ padding: 4px;
+ text-align: center;
+ border-bottom: 2px #fff solid;
+ font-size: 12px;
+ font-weight: bold;
+ }
+
+ table.auditlogs td {
+ background-color: #CCC;
+ color: #000;
+ padding: 4px;
+ text-align: center;
+ border: 1px #fff solid;
+ }
+
+ div.auditLogReportResults {
+ border: 1px dashed #DFDEDE;
+ margin-bottom: 15px;
+ margin-left: 2px;
+ padding: 5px;
+ }
public class UploadActionTest
extends PlexusInSpringTestCase
{
-- private ArchivaTaskScheduler scheduler;
--
-- private MockControl schedulerControl;
--
private UploadAction uploadAction;
private ArchivaConfiguration archivaConfig;
private RepositoryContentFactory repoFactory;
private MockControl repoFactoryControl;
-
+
- private ArchivaAuditLogsDao auditLogsDao;
-
- private MockControl auditLogsDaoControl;
-
private static final String REPOSITORY_ID = "test-repo";
- private Configuration config;
+ private Configuration config;
public void setUp()
throws Exception
{
super.setUp();
-- schedulerControl = MockControl.createControl( ArchivaTaskScheduler.class );
-- scheduler = (ArchivaTaskScheduler) schedulerControl.getMock();
++ MockControl schedulerControl = MockControl.createControl( ArchivaTaskScheduler.class );
++ ArchivaTaskScheduler scheduler = (ArchivaTaskScheduler) schedulerControl.getMock();
archivaConfigControl = MockControl.createControl( ArchivaConfiguration.class );
archivaConfig = (ArchivaConfiguration) archivaConfigControl.getMock();
repoFactoryControl = MockClassControl.createControl( RepositoryContentFactory.class );
repoFactory = (RepositoryContentFactory) repoFactoryControl.getMock();
-
+
- auditLogsDaoControl = MockControl.createControl( ArchivaAuditLogsDao.class );
- auditLogsDaoControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
- auditLogsDao = (ArchivaAuditLogsDao) auditLogsDaoControl.getMock();
-
uploadAction = new UploadAction();
uploadAction.setScheduler( scheduler );
uploadAction.setConfiguration( archivaConfig );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config );
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ), content );
- auditLogsDaoControl.expectAndReturn( auditLogsDao.saveAuditLogs( new ArchivaAuditLogs() ), null );
--
++
archivaConfigControl.replay();
repoFactoryControl.replay();
- auditLogsDaoControl.replay();
String returnString = uploadAction.doUpload();
assertEquals( Action.SUCCESS, returnString );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config, 2 );
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ), content, 2 );
- auditLogsDaoControl.expectAndReturn( auditLogsDao.saveAuditLogs( new ArchivaAuditLogs() ), null, 2 );
--
++
archivaConfigControl.replay();
repoFactoryControl.replay();
- auditLogsDaoControl.replay();
--
++
String returnString = uploadAction.doUpload();
assertEquals( Action.SUCCESS, returnString );
private MockControl archivaConfigurationControl;
private ArchivaConfiguration archivaConfiguration;
-
+
- private ArchivaAuditLogsDao auditLogsDao;
-
- private MockControl auditLogsDaoControl;
-
private static final String REPO_ID = "repo-ident";
private File location;
archivaConfigurationControl = MockControl.createControl( ArchivaConfiguration.class );
archivaConfiguration = (ArchivaConfiguration) archivaConfigurationControl.getMock();
action.setArchivaConfiguration( archivaConfiguration );
-
+
- auditLogsDaoControl = MockControl.createControl( ArchivaAuditLogsDao.class );
- auditLogsDaoControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
- auditLogsDao = (ArchivaAuditLogsDao) auditLogsDaoControl.getMock();
- action.setAuditLogsDao( auditLogsDao );
-
roleManagerControl = MockControl.createControl( RoleManager.class );
roleManager = (RoleManager) roleManagerControl.getMock();
action.setRoleManager( roleManager );
Configuration configuration = prepDeletionTest( createRepository(), 4 );
- auditLogsDaoControl.expectAndReturn( auditLogsDao.saveAuditLogs( new ArchivaAuditLogs() ), null );
- auditLogsDaoControl.replay();
-
-- String status = action.deleteEntry();
++ String status = action.deleteEntry();
- auditLogsDaoControl.verify();
-
assertEquals( Action.SUCCESS, status );
assertTrue( configuration.getManagedRepositories().isEmpty() );
Configuration configuration = prepDeletionTest( createRepository(), 4 );
- auditLogsDaoControl.expectAndReturn( auditLogsDao.saveAuditLogs( new ArchivaAuditLogs() ), null );
- auditLogsDaoControl.replay();
-
String status = action.deleteContents();
-
+
- auditLogsDaoControl.verify();
-
assertEquals( Action.SUCCESS, status );
assertTrue( configuration.getManagedRepositories().isEmpty() );
assertEquals( 1, configuration.getProxyConnectors().size() );
- auditLogsDaoControl.expectAndReturn( auditLogsDao.saveAuditLogs( new ArchivaAuditLogs() ), null );
- auditLogsDaoControl.replay();
-
String status = action.deleteContents();
- auditLogsDaoControl.verify();
+
assertEquals( Action.SUCCESS, status );
assertTrue( configuration.getManagedRepositories().isEmpty() );
public void testDeleteRepositoryCancelled()
throws Exception
{
+ repositoryStatisticsManagerControl.replay();
+
ManagedRepositoryConfiguration originalRepository = createRepository();
Configuration configuration = prepDeletionTest( originalRepository, 3 );
+
String status = action.execute();
assertEquals( Action.SUCCESS, status );
ManagedRepositoryConfiguration repository = action.getRepository();
populateRepository( repository );
repository.setName( "new repo name" );
-
+
- auditLogsDaoControl.expectAndReturn( auditLogsDao.saveAuditLogs( new ArchivaAuditLogs() ), null );
- auditLogsDaoControl.replay();
-
String status = action.commit();
assertEquals( Action.SUCCESS, status );
-
+
ManagedRepositoryConfiguration newRepository = createRepository();
newRepository.setName( "new repo name" );
assertRepositoryEquals( repository, newRepository );
<!-- 30 minutes = 1800 seconds -->
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
- </component>
- </component>
- <component>
- <role>org.apache.maven.archiva.database.ArchivaAuditLogsDao</role>
- <role-hint>jdo</role-hint>
- <implementation>org.apache.maven.archiva.web.action.admin.repositories.ArchivaAuditLogsDaoStub</implementation>
+ </component>
+
</components>
</plexus>
private void triggerAuditEvent( DavResource member, String event ) throws DavException
{
String path = logicalResource + "/" + member.getDisplayName();
--
-- triggerAuditEvent( checkDavResourceIsArchivaDavResource( member ).remoteAddr, locator.getRepositoryId(), path,
-- event );
++
++ ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( member );
++ AuditEvent auditEvent = new AuditEvent( locator.getRepositoryId(), resource.principal, path, event );
++ auditEvent.setRemoteIP( resource.remoteAddr );
++
++ for ( AuditListener listener : auditListeners )
++ {
++ listener.auditEvent( auditEvent );
++ }
}
public void move( DavResource destination )
private MockControl repoContentFactoryControl;
private RepositoryContentFactory repoFactory;
-
+
- private ArchivaAuditLogsDao auditLogsDao;
-
- private MockControl auditLogsDaoControl;
-
public void setUp()
throws Exception
{
archivaConfigurationControl = MockControl.createControl( ArchivaConfiguration.class );
archivaConfiguration = (ArchivaConfiguration) archivaConfigurationControl.getMock();
-
+
- auditLogsDaoControl = MockControl.createControl( ArchivaAuditLogsDao.class );
- auditLogsDaoControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
- auditLogsDao = (ArchivaAuditLogsDao) auditLogsDaoControl.getMock();
-
config = new Configuration();
config.addManagedRepository( createManagedRepository( RELEASES_REPO, new File( getBasedir(),
"target/test-classes/" +
--- /dev/null
+The following is the intended content model for the metadata content repository:
+
+.
+`-- repositories/
+ `-- central/
+ |-- config/
+ | |-- name=
+ | |-- storageUrl=
+ | `-- uri=
+ |-- content/
+ | `-- org/
+ | `-- apache/
+ | |-- archiva/
+ | | `-- platform/
+ | | |-- scanner/
+ | | | |-- 1.0-SNAPSHOT/
+ | | | | |-- scanner-1.0-20091120.012345-1.pom/
+ | | | | | |-- asc=
+ | | | | | |-- created=
+ | | | | | |-- fileCreated=
+ | | | | | |-- fileLastModified=
+ | | | | | |-- maven:buildNumber=
+ | | | | | |-- maven:classifier=
+ | | | | | |-- maven:timestamp=
+ | | | | | |-- maven:type=
+ | | | | | |-- md5=
+ | | | | | |-- sha1=
+ | | | | | |-- size=
+ | | | | | |-- updated=
+ | | | | | `-- version=
+ | | | | |-- ciManagement.system=
+ | | | | |-- ciManagement.url=
+ | | | | |-- created=
+ | | | | |-- dependencies.0.artifactId=
+ | | | | |-- dependencies.0.classifier=
+ | | | | |-- dependencies.0.groupId=
+ | | | | |-- dependencies.0.optional=
+ | | | | |-- dependencies.0.scope=
+ | | | | |-- dependencies.0.systemPath=
+ | | | | |-- dependencies.0.type=
+ | | | | |-- dependencies.0.version=
+ | | | | |-- description=
+ | | | | |-- individuals.0.email=
+ | | | | |-- individuals.0.name=
+ | | | | |-- individuals.0.properties.scmId=
+ | | | | |-- individuals.0.roles.0=
+ | | | | |-- individuals.0.timezone=
+ | | | | |-- issueManagement.system=
+ | | | | |-- issueManagement.url=
+ | | | | |-- licenses.0.name=
+ | | | | |-- licenses.0.url=
+ | | | | |-- mailingLists.0.mainArchiveUrl=
+ | | | | |-- mailingLists.0.name=
+ | | | | |-- mailingLists.0.otherArchives.0=
+ | | | | |-- mailingLists.0.postAddress=
+ | | | | |-- mailingLists.0.subscribeAddress=
+ | | | | |-- mailingLists.0.unsubscribeAddress=
+ | | | | |-- maven:buildExtensions.0.artifactId=
+ | | | | |-- maven:buildExtensions.0.groupId=
+ | | | | |-- maven:buildExtensions.0.version=
+ | | | | |-- maven:packaging=
+ | | | | |-- maven:parent.artifactId=
+ | | | | |-- maven:parent.groupId=
+ | | | | |-- maven:parent.version=
+ | | | | |-- maven:plugins.0.artifactId=
+ | | | | |-- maven:plugins.0.groupId=
+ | | | | |-- maven:plugins.0.reporting=
+ | | | | |-- maven:plugins.0.version=
+ | | | | |-- maven:properties.mavenVersion=
+ | | | | |-- maven:repositories.0.id=
+ | | | | |-- maven:repositories.0.layout=
+ | | | | |-- maven:repositories.0.name=
+ | | | | |-- maven:repositories.0.plugins=
+ | | | | |-- maven:repositories.0.releases=
+ | | | | |-- maven:repositories.0.snapshots=
+ | | | | |-- maven:repositories.0.url=
+ | | | | |-- name=
+ | | | | |-- organization.favicon=
+ | | | | |-- organization.logo=
+ | | | | |-- organization.name=
+ | | | | |-- organization.url=
+ | | | | |-- relocatedTo.namespace=
+ | | | | |-- relocatedTo.project=
+ | | | | |-- relocatedTo.projectVersion=
+ | | | | |-- scm.connection=
+ | | | | |-- scm.developerConnection=
+ | | | | |-- scm.url=
+ | | | | |-- updated=
+ | | | | `-- url=
+ | | | `-- maven:artifactId=
+ | | `-- maven:groupId=
+ | `-- maven/
+ | `-- plugins/
+ | |-- maven:groupId=
+ | |-- maven:plugins.compiler.artifactId=
+ | `-- maven:plugins.compiler.name=
+ |-- facets/
++ | |-- org.apache.archiva.audit/
++ | | `-- 2010/
++ | | `-- 01/
++ | | `-- 19/
++ | | `-- 093600.000/
++ | | |-- action=
++ | | |-- artifact.id=
++ | | |-- artifact.namespace=
++ | | |-- artifact.projectId=
++ | | |-- artifact.version=
++ | | |-- remoteIP=
++ | | `-- user=
+ | |-- org.apache.archiva.metadata.repository.stats/
+ | | `-- 2009/
+ | | `-- 12/
+ | | `-- 03/
+ | | `-- 090000.000/
+ | | |-- scanEndTime=
+ | | |-- scanStartTime=
+ | | |-- totalArtifactCount=
+ | | |-- totalArtifactFileSize=
+ | | |-- totalFileCount=
+ | | |-- totalGroupCount=
+ | | `-- totalProjectCount=
+ | `-- org.apache.archiva.reports/
+ `-- references/
+ `-- org/
+ `-- apache/
+ `-- archiva/
+ |-- parent/
+ | `-- 1/
+ | `-- references/
+ | `-- org/
+ | `-- apache/
+ | `-- archiva/
+ | |-- platform/
+ | | `-- scanner/
+ | | `-- 1.0-SNAPSHOT/
+ | | `-- referenceType=parent
+ | `-- web/
+ | `-- webapp/
+ | `-- 1.0-SNAPSHOT/
+ | `-- referenceType=parent
+ `-- platform/
+ `-- scanner/
+ `-- 1.0-SNAPSHOT/
+ `-- references/
+ `-- org/
+ `-- apache/
+ `-- archiva/
+ `-- web/
+ `-- webapp/
+ `-- 1.0-SNAPSHOT/
+ `-- referenceType=dependency
+
+(To update - run "tree --dirsfirst -F" on the unpacked content-model.zip from the sandbox)
+
+Notes:
+
+*) config should be reflected to an external configuration file and only stored in the content repository for purposes
+ of accessing through a REST API, for example
+
+*) In the above example, we have the following coordinates:
+ - namespace = org.apache.archiva.platform (namespaces are of arbitrary depth, and are project namespaces, not to be
+ confused with JCR's item/node namespaces)
+ - project = scanner
+ - version = 1.0-SNAPSHOT
+ - artifact = scanner-1.0-20091120.012345-1.pom
+
+*) filename (scanner-1.0-20091120.012345-1.pom) is a node, and each is distinct except for checksums, etc.
+
+*) the top level version (1.0-SNAPSHOT) is the version best used to describe the project (the "marketed version"). It
+ must still be unique for lookup and comparing project versions to each other, but can contain several different
+ "build" artifacts.
+
+*) Projects are just a single code project. They do not have subprojects - if such modeling needs to be done, then we
+ can create a products tree that will map what "Archiva 1.0" contains from the other repositories.
+
+*) There is no Maven-native information here, other than that in the maven: namespace. pom & other files are not
+ treated as special - they are each stored and it is up to the reader to interpret
+
+*) artifact data is not stored in the metadata repository (there is no data= property on the file). The information here
+ is enough to locate the file in the original storageUrl when it is requested
+
+*) The API will still use separate namespace and project identifiers (the namespace can be null if there isn't one).
+ This is chosen to allow splitting the namespace on '.', and also allowing '.' in the project identifier without
+ splitting
+
+*) properties with '.' may be nested in other representations such as Java models or XML, if appropriate
+
+*) we only keep one set of project information for a "version" - this differs from Maven's storage of one POM per
+ snapshot. The Maven 2 module will take the latest. Those that need Maven's behaviour should retrieve the POM
+ directly. Implementations are also free to store as much information as desired within the artifact node in addition
+ to whatever is shared in the project version node.
+
+*) while some information is stored at the most generic level in the metadata repository (eg maven:groupId,
+ maven:artifactId), for convenience when loaded by the implementation it may all be pushed into the projectVersion's
+ information. The metadata repository implementation can decide how best to store and retrieve the information.
+
+*) created/updated timestamps may be maintained by the metadata repository implementation for the metadata itself.
+ Timestamps for individual files are stored as additional properties (fileCreated, fileLastModified). It may make
+ sense to add a "discovered" timestamp if an artifact is known to be created at a different time to which it is added
+ to the metadata repository.
+
+*) references are stored outside the main model so that their creation doesn't imply a "stub" model - we know if the
+ project exists whether a reference is created or not. References need not imply referential integrity.
+
+*) some of the above needs to be reviewed before going into production. For example:
+ - the maven specific aspects of dependencies should become a faceted part of the content
+ - more of the metadata might be faceted in general, keeping the content model basic by default
+ - determine if any of the stats can be derived by functions of the content repository rather than storing and trying
+ to keep them up to date. Historical data might be retained by versioning and taking a snapshot at a given point in
+ time. The current approach of tying them to the scanning process is not optimal
+ - the storing of metadata as 0-indexed lists would be better as child nodes. This might require additional levels
+ in the current repository (.../scanner/versions/1.0-SNAPSHOT/artifacts/scanner-1.0-20091120.012345-1.pom), or
+ for listed information to be in a separate tree
+ (/metadata/org/apache/archiva/platform/scanner/1.0-SNAPSHOT/mailingLists/users), or to use some 'reserved names'
+ for nodes (by using a content repository's namespacing capabilities). The first has the advantage of
+ keeping information together but a longer path name and less familiarity to Maven users. The second arbitrarily
+ divides metadata. The third option seems preferable but needs more investigation at this stage.
+
+*) Future possibilities:
+ - audit metadata on artifacts (who uploaded, when, and how), or whether it was discovered by scanning
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ --><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>metadata</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>metadata-model</artifactId>
+ <name>Archiva Metadata Model</name>
+</project>
--- /dev/null
+package org.apache.archiva.metadata.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+public interface MetadataFacetFactory
+{
+ MetadataFacet createMetadataFacet();
++
++ MetadataFacet createMetadataFacet( String repositoryId, String name );
+}
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ --><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>metadata</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>metadata-repository-api</artifactId>
+ <name>Archiva Metadata Repository API</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-model</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ --><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>archiva-modules</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>metadata</artifactId>
+ <name>Archiva Metadata</name>
+ <packaging>pom</packaging>
+ <modules>
+ <module>metadata-model</module>
+ <module>metadata-repository-api</module>
+ </modules>
+</project>
--- /dev/null
--- /dev/null
++<?xml version="1.0" encoding="UTF-8"?>
++<!--
++ ~ Licensed to the Apache Software Foundation (ASF) under one
++ ~ or more contributor license agreements. See the NOTICE file
++ ~ distributed with this work for additional information
++ ~ regarding copyright ownership. The ASF licenses this file
++ ~ to you under the Apache License, Version 2.0 (the
++ ~ "License"); you may not use this file except in compliance
++ ~ with the License. You may obtain a copy of the License at
++ ~
++ ~ http://www.apache.org/licenses/LICENSE-2.0
++ ~
++ ~ Unless required by applicable law or agreed to in writing,
++ ~ software distributed under the License is distributed on an
++ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ ~ KIND, either express or implied. See the License for the
++ ~ specific language governing permissions and limitations
++ ~ under the License.
++-->
++<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
++ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
++ <modelVersion>4.0.0</modelVersion>
++ <parent>
++ <artifactId>plugins</artifactId>
++ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
++ </parent>
++ <artifactId>audit</artifactId>
++ <name>Audit Logging</name>
++ <dependencies>
++ <dependency>
++ <groupId>org.apache.archiva</groupId>
++ <artifactId>archiva-repository-layer</artifactId>
++ </dependency>
++ <dependency>
++ <groupId>org.apache.archiva</groupId>
++ <artifactId>metadata-repository-api</artifactId>
++ </dependency>
++ <dependency>
++ <groupId>org.slf4j</groupId>
++ <artifactId>slf4j-simple</artifactId>
++ <scope>test</scope>
++ </dependency>
++ </dependencies>
++</project>
--- /dev/null
--- /dev/null
++package org.apache.archiva.audit;
++
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements. See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership. The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied. See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++import org.apache.archiva.metadata.model.MetadataFacet;
++import org.apache.archiva.metadata.model.MetadataFacetFactory;
++import org.apache.maven.archiva.repository.audit.AuditEvent;
++
++/**
++ * @plexus.component role="org.apache.archiva.metadata.model.MetadataFacetFactory" role-hint="org.apache.archiva.audit"
++ */
++public class AuditEventFactory
++ implements MetadataFacetFactory
++{
++ public MetadataFacet createMetadataFacet()
++ {
++ throw new UnsupportedOperationException( "Must construct an audit event with a name" );
++ }
++
++ public MetadataFacet createMetadataFacet( String repositoryId, String name )
++ {
++ return new AuditEvent( name, repositoryId );
++ }
++}
--- /dev/null
--- /dev/null
++package org.apache.archiva.audit;
++
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *  http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++import java.util.Date;
++import java.util.List;
++
++import org.apache.maven.archiva.repository.audit.AuditEvent;
++
++/**
++ * Stores and retrieves audit events.
++ */
++public interface AuditManager
++{
++    /**
++     * Get the latest audit events, across all repositories, most recent first.
++     *
++     * @return the most recent audit events
++     */
++    List<AuditEvent> getMostRecentAuditEvents();
++
++    /**
++     * Record a new audit event.
++     *
++     * @param event the event to store; implementations may ignore events that have no repository id
++     */
++    void addAuditEvent( AuditEvent event );
++
++    /**
++     * Remove all stored audit events for the given repository.
++     *
++     * @param repositoryId the identifier of the repository whose events are removed
++     */
++    void deleteAuditEvents( String repositoryId );
++
++    /**
++     * Get the audit events whose timestamps fall within the given range, most recent first.
++     *
++     * @param repositoryId the repository to query, or null to query all repositories
++     * @param startTime    inclusive lower bound, or null for no lower bound
++     * @param endTime      inclusive upper bound, or null for no upper bound
++     * @return the matching audit events
++     */
++    List<AuditEvent> getAuditEventsInRange( String repositoryId, Date startTime, Date endTime );
++}
--- /dev/null
--- /dev/null
++package org.apache.archiva.audit;
++
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *  http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++import java.text.ParseException;
++import java.text.SimpleDateFormat;
++import java.util.ArrayList;
++import java.util.Collection;
++import java.util.Collections;
++import java.util.Comparator;
++import java.util.Date;
++import java.util.List;
++
++import org.apache.archiva.metadata.repository.MetadataRepository;
++import org.apache.maven.archiva.repository.audit.AuditEvent;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
++
++/**
++ * Default {@link AuditManager} backed by the metadata repository. Audit events are stored as
++ * metadata facets whose names are timestamps (see {@code AuditEvent.TIMESTAMP_FORMAT}), so
++ * chronological ordering is derived from the facet names themselves.
++ *
++ * @plexus.component role="org.apache.archiva.audit.AuditManager"
++ */
++public class DefaultAuditManager
++    implements AuditManager
++{
++    /**
++     * @plexus.requirement
++     */
++    private MetadataRepository metadataRepository;
++
++    // Maximum number of events returned by getMostRecentAuditEvents().
++    private static final int NUM_RECENT_EVENTS = 10;
++
++    private static final Logger log = LoggerFactory.getLogger( DefaultAuditManager.class );
++
++    public List<AuditEvent> getMostRecentAuditEvents()
++    {
++        // TODO: consider a more efficient implementation that directly gets the last ten from the content repository
++        List<AuditRecord> records = new ArrayList<AuditRecord>();
++        for ( String repositoryId : metadataRepository.getRepositories() )
++        {
++            List<String> timestamps = metadataRepository.getMetadataFacets( repositoryId, AuditEvent.FACET_ID );
++            for ( String timestamp : timestamps )
++            {
++                records.add( new AuditRecord( repositoryId, timestamp ) );
++            }
++        }
++        // AuditRecord's natural order is reverse name (timestamp) order, so the newest come first
++        Collections.sort( records );
++        records = records.subList( 0, Math.min( records.size(), NUM_RECENT_EVENTS ) );
++
++        List<AuditEvent> events = new ArrayList<AuditEvent>( records.size() );
++        for ( AuditRecord record : records )
++        {
++            AuditEvent auditEvent =
++                (AuditEvent) metadataRepository.getMetadataFacet( record.repositoryId, AuditEvent.FACET_ID,
++                                                                  record.name );
++            events.add( auditEvent );
++        }
++        return events;
++    }
++
++    public void addAuditEvent( AuditEvent event )
++    {
++        // ignore those with no repository - they will still be logged to the textual audit log
++        if ( event.getRepositoryId() != null )
++        {
++            metadataRepository.addMetadataFacet( event.getRepositoryId(), event );
++        }
++    }
++
++    public void deleteAuditEvents( String repositoryId )
++    {
++        metadataRepository.removeMetadataFacets( repositoryId, AuditEvent.FACET_ID );
++    }
++
++    public List<AuditEvent> getAuditEventsInRange( String repoId, Date startTime, Date endTime )
++    {
++        Collection<String> repositoryIds =
++            repoId != null ? Collections.singletonList( repoId ) : metadataRepository.getRepositories();
++
++        // SimpleDateFormat is not thread-safe, so create one per call instead of per facet name
++        SimpleDateFormat timestampFormat = new SimpleDateFormat( AuditEvent.TIMESTAMP_FORMAT );
++
++        List<AuditEvent> results = new ArrayList<AuditEvent>();
++        for ( String repositoryId : repositoryIds )
++        {
++            List<String> list = metadataRepository.getMetadataFacets( repositoryId, AuditEvent.FACET_ID );
++            for ( String name : list )
++            {
++                try
++                {
++                    Date date = timestampFormat.parse( name );
++                    // null bounds are open-ended; otherwise the range is inclusive at both ends
++                    if ( ( startTime == null || !date.before( startTime ) ) &&
++                        ( endTime == null || !date.after( endTime ) ) )
++                    {
++                        AuditEvent event =
++                            (AuditEvent) metadataRepository.getMetadataFacet( repositoryId, AuditEvent.FACET_ID, name );
++                        results.add( event );
++                    }
++                }
++                catch ( ParseException e )
++                {
++                    log.error( "Invalid audit event found in the metadata repository: " + e.getMessage() );
++                    // continue and ignore this one
++                }
++            }
++        }
++        // most recent events first
++        Collections.sort( results, new Comparator<AuditEvent>()
++        {
++            public int compare( AuditEvent o1, AuditEvent o2 )
++            {
++                return o2.getTimestamp().compareTo( o1.getTimestamp() );
++            }
++        } );
++        return results;
++    }
++
++    public void setMetadataRepository( MetadataRepository metadataRepository )
++    {
++        this.metadataRepository = metadataRepository;
++    }
++
++    /**
++     * Pairs a repository id with a facet name (a timestamp) so events from all repositories can
++     * be sorted together; the natural ordering is reverse-lexicographic on the name, i.e. the
++     * newest timestamp sorts first.
++     */
++    private static final class AuditRecord
++        implements Comparable<AuditRecord>
++    {
++        private String repositoryId;
++
++        private String name;
++
++        public AuditRecord( String repositoryId, String name )
++        {
++            this.repositoryId = repositoryId;
++            this.name = name;
++        }
++
++        public int compareTo( AuditRecord other )
++        {
++            // reverse ordering
++            return other.name.compareTo( name );
++        }
++    }
++}
--- /dev/null
--- /dev/null
++package org.apache.archiva.audit;
++
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *  http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++import org.apache.maven.archiva.repository.audit.AuditEvent;
++import org.apache.maven.archiva.repository.audit.AuditListener;
++
++/**
++ * Audit listener that forwards selected events to the {@link AuditManager} so they are persisted
++ * in the metadata repository (in addition to the textual audit log handled elsewhere).
++ *
++ * @plexus.component role="org.apache.maven.archiva.repository.audit.AuditListener" role-hint="metadata"
++ */
++public class MetadataAuditListener
++    implements AuditListener
++{
++    /**
++     * @plexus.requirement
++     */
++    private AuditManager auditManager;
++
++    public void auditEvent( AuditEvent event )
++    {
++        // for now we only log upload events, some of the others are quite noisy
++        if ( event.getAction().equals( AuditEvent.CREATE_FILE ) || event.getAction().equals( AuditEvent.UPLOAD_FILE ) )
++        {
++            auditManager.addAuditEvent( event );
++        }
++    }
++}
--- /dev/null
--- /dev/null
++package org.apache.archiva.audit;
++
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements. See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership. The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied. See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
++
++import java.text.DecimalFormat;
++import java.text.ParseException;
++import java.text.SimpleDateFormat;
++import java.util.ArrayList;
++import java.util.Arrays;
++import java.util.Collections;
++import java.util.Date;
++import java.util.LinkedHashMap;
++import java.util.List;
++import java.util.Map;
++
++import junit.framework.TestCase;
++import org.apache.archiva.metadata.repository.MetadataRepository;
++import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
++import org.apache.maven.archiva.repository.RepositoryContentFactory;
++import org.apache.maven.archiva.repository.audit.AuditEvent;
++import org.apache.maven.archiva.repository.content.ManagedDefaultRepositoryContent;
++import org.easymock.MockControl;
++import org.easymock.classextension.MockClassControl;
++
++public class AuditManagerTest
++ extends TestCase
++{
++ private DefaultAuditManager auditManager;
++
++ private MockControl metadataRepositoryControl;
++
++ private MetadataRepository metadataRepository;
++
++ private static final String AUDIT_EVENT_BASE = "2010/01/18/123456.";
++
++ private static final String TEST_REPO_ID = "test-repo";
++
++ private static final String TEST_REPO_ID_2 = "repo2";
++
++ private static final String TEST_USER = "test_user";
++
++ private static final String TEST_RESOURCE_BASE = "test/resource";
++
++ private static final String TEST_IP_ADDRESS = "127.0.0.1";
++
++ private static final SimpleDateFormat TIMESTAMP_FORMAT = new SimpleDateFormat( AuditEvent.TIMESTAMP_FORMAT );
++
++ private static final DecimalFormat MILLIS_FORMAT = new DecimalFormat( "000" );
++
++ @Override
++ protected void setUp()
++ throws Exception
++ {
++ super.setUp();
++
++ auditManager = new DefaultAuditManager();
++
++ metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
++ metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
++ auditManager.setMetadataRepository( metadataRepository );
++
++ ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
++ repository.setId( TEST_REPO_ID );
++ repository.setLocation( "" );
++ ManagedDefaultRepositoryContent content = new ManagedDefaultRepositoryContent();
++ content.setRepository( repository );
++ MockControl control = MockClassControl.createControl( RepositoryContentFactory.class );
++ RepositoryContentFactory contentFactory = (RepositoryContentFactory) control.getMock();
++ contentFactory.getManagedRepositoryContent( TEST_REPO_ID );
++ control.setDefaultReturnValue( content );
++ control.replay();
++ }
++
++ public void testGetMostRecentEvents()
++ throws ParseException
++ {
++ metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(),
++ Collections.singletonList( TEST_REPO_ID ) );
++
++ int numEvents = 11;
++ List<String> eventNames = new ArrayList<String>( numEvents );
++ for ( int i = 0; i < numEvents; i++ )
++ {
++ eventNames.add( AUDIT_EVENT_BASE + MILLIS_FORMAT.format( i ) );
++ }
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ), eventNames );
++
++ for ( String name : eventNames.subList( 1, eventNames.size() ) )
++ {
++ AuditEvent event = createTestEvent( name );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name ), event );
++ }
++ metadataRepositoryControl.replay();
++
++ List<AuditEvent> events = auditManager.getMostRecentAuditEvents();
++ assertNotNull( events );
++ assertEquals( numEvents - 1, events.size() );
++ int expectedTimestampCounter = numEvents - 1;
++ for ( AuditEvent event : events )
++ {
++ String num = MILLIS_FORMAT.format( expectedTimestampCounter );
++ assertEvent( event, AUDIT_EVENT_BASE + num, TEST_RESOURCE_BASE + "/" + num );
++ expectedTimestampCounter--;
++ }
++
++ metadataRepositoryControl.verify();
++ }
++
++ private static AuditEvent createTestEvent( String name )
++ throws ParseException
++ {
++ return createTestEvent( TEST_REPO_ID, name );
++ }
++
++ private static AuditEvent createTestEvent( String repositoryId, String name )
++ throws ParseException
++ {
++ AuditEvent event = new AuditEvent();
++ event.setTimestamp( TIMESTAMP_FORMAT.parse( name ) );
++ event.setAction( AuditEvent.UPLOAD_FILE );
++ event.setRemoteIP( TEST_IP_ADDRESS );
++ event.setRepositoryId( repositoryId );
++ event.setUserId( TEST_USER );
++ event.setResource( TEST_RESOURCE_BASE + "/" + name.substring( AUDIT_EVENT_BASE.length() ) );
++ return event;
++ }
++
++ public void testGetMostRecentEventsLessThan10()
++ throws ParseException
++ {
++ metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(),
++ Collections.singletonList( TEST_REPO_ID ) );
++ int numEvents = 5;
++ List<String> eventNames = new ArrayList<String>( numEvents );
++ for ( int i = 0; i < numEvents; i++ )
++ {
++ eventNames.add( AUDIT_EVENT_BASE + MILLIS_FORMAT.format( i ) );
++ }
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ), eventNames );
++
++ for ( String name : eventNames )
++ {
++ AuditEvent event = createTestEvent( name );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name ), event );
++ }
++ metadataRepositoryControl.replay();
++
++ List<AuditEvent> events = auditManager.getMostRecentAuditEvents();
++ assertNotNull( events );
++ assertEquals( numEvents, events.size() );
++ int expectedTimestampCounter = numEvents - 1;
++ for ( AuditEvent event : events )
++ {
++ String num = MILLIS_FORMAT.format( expectedTimestampCounter );
++ assertEvent( event, AUDIT_EVENT_BASE + num, TEST_RESOURCE_BASE + "/" + num );
++ expectedTimestampCounter--;
++ }
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testGetMostRecentEventsInterleavedRepositories()
++ throws ParseException
++ {
++ metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(),
++ Arrays.asList( TEST_REPO_ID, TEST_REPO_ID_2 ) );
++ int numEvents = 11;
++ Map<String, List<String>> eventNames = new LinkedHashMap<String, List<String>>();
++ List<AuditEvent> events = new ArrayList<AuditEvent>();
++ eventNames.put( TEST_REPO_ID, new ArrayList<String>() );
++ eventNames.put( TEST_REPO_ID_2, new ArrayList<String>() );
++ for ( int i = 0; i < numEvents; i++ )
++ {
++ String name = AUDIT_EVENT_BASE + MILLIS_FORMAT.format( i );
++ String repositoryId = i % 2 == 0 ? TEST_REPO_ID : TEST_REPO_ID_2;
++ eventNames.get( repositoryId ).add( name );
++ events.add( createTestEvent( repositoryId, name ) );
++ }
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ), eventNames.get( TEST_REPO_ID ) );
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID_2, AuditEvent.FACET_ID ),
++ eventNames.get( TEST_REPO_ID_2 ) );
++
++ for ( AuditEvent event : events.subList( 1, events.size() ) )
++ {
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( event.getRepositoryId(), AuditEvent.FACET_ID, event.getName() ),
++ event );
++ }
++ metadataRepositoryControl.replay();
++
++ events = auditManager.getMostRecentAuditEvents();
++ assertNotNull( events );
++ assertEquals( numEvents - 1, events.size() );
++ int expectedTimestampCounter = numEvents - 1;
++ for ( AuditEvent event : events )
++ {
++ String num = MILLIS_FORMAT.format( expectedTimestampCounter );
++ assertEvent( event, AUDIT_EVENT_BASE + num, TEST_RESOURCE_BASE + "/" + num,
++ expectedTimestampCounter % 2 == 0 ? TEST_REPO_ID : TEST_REPO_ID_2 );
++ expectedTimestampCounter--;
++ }
++
++ metadataRepositoryControl.verify();
++ }
++
++ private static void assertEvent( AuditEvent event, String name, String resource )
++ {
++ assertEvent( event, name, resource, TEST_REPO_ID );
++ }
++
++ private static void assertEvent( AuditEvent event, String name, String resource, String repositoryId )
++ {
++ assertEquals( name, TIMESTAMP_FORMAT.format( event.getTimestamp() ) );
++ assertEquals( AuditEvent.UPLOAD_FILE, event.getAction() );
++ assertEquals( TEST_IP_ADDRESS, event.getRemoteIP() );
++ assertEquals( repositoryId, event.getRepositoryId() );
++ assertEquals( TEST_USER, event.getUserId() );
++ assertEquals( resource, event.getResource() );
++ }
++
++ public void testGetMostRecentEventsWhenEmpty()
++ {
++ metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(),
++ Collections.singletonList( TEST_REPO_ID ) );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ), Collections.emptyList() );
++ metadataRepositoryControl.replay();
++
++ assertTrue( auditManager.getMostRecentAuditEvents().isEmpty() );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testAddAuditEvent()
++ throws ParseException
++ {
++ String name = TIMESTAMP_FORMAT.format( new Date() );
++ AuditEvent event = createTestEvent( name );
++
++ metadataRepository.addMetadataFacet( TEST_REPO_ID, event );
++
++ metadataRepositoryControl.replay();
++
++ auditManager.addAuditEvent( event );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testAddAuditEventNoRepositoryId()
++ throws ParseException
++ {
++ String name = TIMESTAMP_FORMAT.format( new Date() );
++ AuditEvent event = createTestEvent( null, name );
++
++ // should just be ignored
++
++ metadataRepositoryControl.replay();
++
++ auditManager.addAuditEvent( event );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testDeleteStats()
++ {
++ metadataRepository.removeMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID );
++
++ metadataRepositoryControl.replay();
++
++ auditManager.deleteAuditEvents( TEST_REPO_ID );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testGetEventsRangeInside()
++ throws ParseException
++ {
++ Date current = new Date();
++
++ String name1 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 12345 ) );
++ Date expectedTimestamp = new Date( current.getTime() - 3000 );
++ String name2 = TIMESTAMP_FORMAT.format( expectedTimestamp );
++ AuditEvent expectedEvent = createTestEvent( name2 );
++ String name3 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 1000 ) );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ),
++ Arrays.asList( name1, name2, name3 ) );
++
++ // only match the middle one
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name2 ), expectedEvent );
++
++ metadataRepositoryControl.replay();
++
++ List<AuditEvent> events =
++ auditManager.getAuditEventsInRange( TEST_REPO_ID, new Date( current.getTime() - 4000 ),
++ new Date( current.getTime() - 2000 ) );
++
++ assertEquals( 1, events.size() );
++ assertEvent( events.get( 0 ), name2, expectedEvent.getResource() );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testGetEventsRangeUpperOutside()
++ throws ParseException
++ {
++ Date current = new Date();
++
++ String name1 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 12345 ) );
++ Date expectedTimestamp = new Date( current.getTime() - 3000 );
++ String name2 = TIMESTAMP_FORMAT.format( expectedTimestamp );
++ AuditEvent expectedEvent2 = createTestEvent( name2 );
++ String name3 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 1000 ) );
++ AuditEvent expectedEvent3 = createTestEvent( name3 );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ),
++ Arrays.asList( name1, name2, name3 ) );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name2 ), expectedEvent2 );
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name3 ), expectedEvent3 );
++
++ metadataRepositoryControl.replay();
++
++ List<AuditEvent> events =
++ auditManager.getAuditEventsInRange( TEST_REPO_ID, new Date( current.getTime() - 4000 ), current );
++
++ assertEquals( 2, events.size() );
++ assertEvent( events.get( 0 ), name3, expectedEvent3.getResource() );
++ assertEvent( events.get( 1 ), name2, expectedEvent2.getResource() );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testGetEventsRangeLowerOutside()
++ throws ParseException
++ {
++ Date current = new Date();
++
++ String name1 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 12345 ) );
++ AuditEvent expectedEvent1 = createTestEvent( name1 );
++ Date expectedTimestamp = new Date( current.getTime() - 3000 );
++ String name2 = TIMESTAMP_FORMAT.format( expectedTimestamp );
++ AuditEvent expectedEvent2 = createTestEvent( name2 );
++ String name3 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 1000 ) );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ),
++ Arrays.asList( name1, name2, name3 ) );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name1 ), expectedEvent1 );
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name2 ), expectedEvent2 );
++
++ metadataRepositoryControl.replay();
++
++ List<AuditEvent> events =
++ auditManager.getAuditEventsInRange( TEST_REPO_ID, new Date( current.getTime() - 20000 ),
++ new Date( current.getTime() - 2000 ) );
++
++ assertEquals( 2, events.size() );
++ assertEvent( events.get( 0 ), name2, expectedEvent2.getResource() );
++ assertEvent( events.get( 1 ), name1, expectedEvent1.getResource() );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testGetEventsRangeLowerAndUpperOutside()
++ throws ParseException
++ {
++ Date current = new Date();
++
++ String name1 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 12345 ) );
++ AuditEvent expectedEvent1 = createTestEvent( name1 );
++ Date expectedTimestamp = new Date( current.getTime() - 3000 );
++ String name2 = TIMESTAMP_FORMAT.format( expectedTimestamp );
++ AuditEvent expectedEvent2 = createTestEvent( name2 );
++ String name3 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 1000 ) );
++ AuditEvent expectedEvent3 = createTestEvent( name3 );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ),
++ Arrays.asList( name1, name2, name3 ) );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name1 ), expectedEvent1 );
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name2 ), expectedEvent2 );
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name3 ), expectedEvent3 );
++
++ metadataRepositoryControl.replay();
++
++ List<AuditEvent> events =
++ auditManager.getAuditEventsInRange( TEST_REPO_ID, new Date( current.getTime() - 20000 ), current );
++
++ assertEquals( 3, events.size() );
++ assertEvent( events.get( 0 ), name3, expectedEvent3.getResource() );
++ assertEvent( events.get( 1 ), name2, expectedEvent2.getResource() );
++ assertEvent( events.get( 2 ), name1, expectedEvent1.getResource() );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testGetEventsRangeMultipleRepositories()
++ throws ParseException
++ {
++ metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(),
++ Arrays.asList( TEST_REPO_ID, TEST_REPO_ID_2 ) );
++
++ Date current = new Date();
++
++ String name1 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 12345 ) );
++ AuditEvent expectedEvent1 = createTestEvent( TEST_REPO_ID, name1 );
++ Date expectedTimestamp = new Date( current.getTime() - 3000 );
++ String name2 = TIMESTAMP_FORMAT.format( expectedTimestamp );
++ AuditEvent expectedEvent2 = createTestEvent( TEST_REPO_ID_2, name2 );
++ String name3 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 1000 ) );
++ AuditEvent expectedEvent3 = createTestEvent( TEST_REPO_ID, name3 );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ), Arrays.asList( name1, name3 ) );
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID_2, AuditEvent.FACET_ID ), Arrays.asList( name2 ) );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name1 ), expectedEvent1 );
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID_2, AuditEvent.FACET_ID, name2 ), expectedEvent2 );
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacet( TEST_REPO_ID, AuditEvent.FACET_ID, name3 ), expectedEvent3 );
++
++ metadataRepositoryControl.replay();
++
++ List<AuditEvent> events =
++ auditManager.getAuditEventsInRange( null, new Date( current.getTime() - 20000 ), current );
++
++ assertEquals( 3, events.size() );
++ assertEvent( events.get( 0 ), name3, expectedEvent3.getResource() );
++ assertEvent( events.get( 1 ), name2, expectedEvent2.getResource(), TEST_REPO_ID_2 );
++ assertEvent( events.get( 2 ), name1, expectedEvent1.getResource() );
++
++ metadataRepositoryControl.verify();
++ }
++
++ public void testGetEventsRangeNotInside()
++ throws ParseException
++ {
++ Date current = new Date();
++
++ String name1 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 12345 ) );
++ AuditEvent expectedEvent1 = createTestEvent( name1 );
++ Date expectedTimestamp = new Date( current.getTime() - 3000 );
++ String name2 = TIMESTAMP_FORMAT.format( expectedTimestamp );
++ AuditEvent expectedEvent2 = createTestEvent( name2 );
++ String name3 = TIMESTAMP_FORMAT.format( new Date( current.getTime() - 1000 ) );
++ AuditEvent expectedEvent3 = createTestEvent( name3 );
++
++ metadataRepositoryControl.expectAndReturn(
++ metadataRepository.getMetadataFacets( TEST_REPO_ID, AuditEvent.FACET_ID ),
++ Arrays.asList( name1, name2, name3 ) );
++
++ metadataRepositoryControl.replay();
++
++ List<AuditEvent> events =
++ auditManager.getAuditEventsInRange( TEST_REPO_ID, new Date( current.getTime() - 20000 ),
++ new Date( current.getTime() - 16000 ) );
++
++ assertEquals( 0, events.size() );
++
++ metadataRepositoryControl.verify();
++ }
++}
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>plugins</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>maven2-repository</artifactId>
+ <name>Maven 2.x Repository Support</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>problem-reports</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-model</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-repository-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-spring</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-model-builder</artifactId>
+ </dependency>
+ <!-- TODO: remove when dependency-tree declares it explicitly, it is currently through the excluded maven-project -->
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-artifact</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.shared</groupId>
+ <artifactId>maven-dependency-tree</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-project</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-configuration</artifactId>
+ </dependency>
+ <!-- TODO: aim to remove this dependency -->
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-xml-tools</artifactId>
+ </dependency>
+ </dependencies>
+ <dependencyManagement>
+ <dependencies>
+ <!-- TODO: this is to override the top level dependency management - we need to rationalise these -->
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-model</artifactId>
+ <version>3.0-alpha-4</version>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+</project>
--- /dev/null
+package org.apache.archiva.metadata.repository.storage.maven2;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.metadata.model.MetadataFacet;
+import org.apache.archiva.metadata.model.MetadataFacetFactory;
+
+/**
+ * Factory for {@link MavenProjectFacet} instances. Project version facets are unnamed, so only
+ * the no-argument factory method is supported.
+ *
+ * @plexus.component role="org.apache.archiva.metadata.model.MetadataFacetFactory" role-hint="org.apache.archiva.metadata.repository.storage.maven2"
+ */
+public class MavenProjectFacetFactory
+    implements MetadataFacetFactory
+{
+    public MetadataFacet createMetadataFacet()
+    {
+        return new MavenProjectFacet();
+    }
++
++    // Unsupported: project version facets have no per-instance name.
++    public MetadataFacet createMetadataFacet( String repositoryId, String name )
++    {
++        throw new UnsupportedOperationException( "There is no valid name for project version facets" );
++    }
+}
--- /dev/null
- <version>1.3-SNAPSHOT</version>
<?xml version="1.0" encoding="UTF-8"?>
<!--
 ~ Licensed to the Apache Software Foundation (ASF) under one
 ~ or more contributor license agreements. See the NOTICE file
 ~ distributed with this work for additional information
 ~ regarding copyright ownership. The ASF licenses this file
 ~ to you under the Apache License, Version 2.0 (the
 ~ "License"); you may not use this file except in compliance
 ~ with the License. You may obtain a copy of the License at
 ~
 ~ http://www.apache.org/licenses/LICENSE-2.0
 ~
 ~ Unless required by applicable law or agreed to in writing,
 ~ software distributed under the License is distributed on an
 ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 ~ KIND, either express or implied. See the License for the
 ~ specific language governing permissions and limitations
 ~ under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <artifactId>plugins</artifactId>
    <groupId>org.apache.archiva</groupId>
    <version>1.4-SNAPSHOT</version>
  </parent>
  <artifactId>metadata-repository-file</artifactId>
  <name>File System Backed Metadata Repository</name>
  <!-- Dependency versions are inherited from the parent's dependencyManagement. -->
  <dependencies>
    <dependency>
      <groupId>org.apache.archiva</groupId>
      <artifactId>metadata-repository-api</artifactId>
    </dependency>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-simple</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging-api</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.codehaus.plexus</groupId>
      <artifactId>plexus-spring</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>
</project>
--- /dev/null
- metadataFacet = metadataFacetFactory.createMetadataFacet();
+package org.apache.archiva.metadata.repository.file;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.model.CiManagement;
+import org.apache.archiva.metadata.model.Dependency;
+import org.apache.archiva.metadata.model.IssueManagement;
+import org.apache.archiva.metadata.model.License;
+import org.apache.archiva.metadata.model.MailingList;
+import org.apache.archiva.metadata.model.MetadataFacet;
+import org.apache.archiva.metadata.model.MetadataFacetFactory;
+import org.apache.archiva.metadata.model.Organization;
+import org.apache.archiva.metadata.model.ProjectMetadata;
+import org.apache.archiva.metadata.model.ProjectVersionMetadata;
+import org.apache.archiva.metadata.model.ProjectVersionReference;
+import org.apache.archiva.metadata.model.Scm;
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @plexus.component role="org.apache.archiva.metadata.repository.MetadataRepository"
+ */
+public class FileMetadataRepository
+ implements MetadataRepository
+{
+ /**
+ * TODO: this isn't suitable for production use
+ *
+ * @plexus.configuration
+ */
+ private File directory = new File( System.getProperty( "user.home" ), ".archiva-metadata" );
+
+ /**
+ * @plexus.requirement role="org.apache.archiva.metadata.model.MetadataFacetFactory"
+ */
+ private Map<String, MetadataFacetFactory> metadataFacetFactories;
+
+ private static final Logger log = LoggerFactory.getLogger( FileMetadataRepository.class );
+
+ private static final String PROJECT_METADATA_KEY = "project-metadata";
+
+ private static final String PROJECT_VERSION_METADATA_KEY = "version-metadata";
+
+ private static final String NAMESPACE_METADATA_KEY = "namespace-metadata";
+
+ private static final String METADATA_KEY = "metadata";
+
+ public void updateProject( String repoId, ProjectMetadata project )
+ {
+ updateProject( repoId, project.getNamespace(), project.getId() );
+ }
+
+ private void updateProject( String repoId, String namespace, String id )
+ {
+ // TODO: this is a more braindead implementation than we would normally expect, for prototyping purposes
+ updateNamespace( repoId, namespace );
+
+ try
+ {
+ File namespaceDirectory = new File( this.directory, repoId + "/" + namespace );
+ Properties properties = new Properties();
+ properties.setProperty( "namespace", namespace );
+ properties.setProperty( "id", id );
+ writeProperties( properties, new File( namespaceDirectory, id ), PROJECT_METADATA_KEY );
+ }
+ catch ( IOException e )
+ {
+ // TODO!
+ e.printStackTrace();
+ }
+ }
+
+ public void updateProjectVersion( String repoId, String namespace, String projectId,
+ ProjectVersionMetadata versionMetadata )
+ {
+ updateProject( repoId, namespace, projectId );
+
+ File directory =
+ new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + versionMetadata.getId() );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+ // remove properties that are not references or artifacts
+ for ( String name : properties.stringPropertyNames() )
+ {
+ if ( !name.contains( ":" ) && !name.equals( "facetIds" ) )
+ {
+ properties.remove( name );
+ }
+ }
+ properties.setProperty( "id", versionMetadata.getId() );
+ setProperty( properties, "name", versionMetadata.getName() );
+ setProperty( properties, "description", versionMetadata.getDescription() );
+ setProperty( properties, "url", versionMetadata.getUrl() );
+ setProperty( properties, "incomplete", String.valueOf( versionMetadata.isIncomplete() ) );
+ if ( versionMetadata.getScm() != null )
+ {
+ setProperty( properties, "scm.connection", versionMetadata.getScm().getConnection() );
+ setProperty( properties, "scm.developerConnection", versionMetadata.getScm().getDeveloperConnection() );
+ setProperty( properties, "scm.url", versionMetadata.getScm().getUrl() );
+ }
+ if ( versionMetadata.getCiManagement() != null )
+ {
+ setProperty( properties, "ci.system", versionMetadata.getCiManagement().getSystem() );
+ setProperty( properties, "ci.url", versionMetadata.getCiManagement().getUrl() );
+ }
+ if ( versionMetadata.getIssueManagement() != null )
+ {
+ setProperty( properties, "issue.system", versionMetadata.getIssueManagement().getSystem() );
+ setProperty( properties, "issue.url", versionMetadata.getIssueManagement().getUrl() );
+ }
+ if ( versionMetadata.getOrganization() != null )
+ {
+ setProperty( properties, "org.name", versionMetadata.getOrganization().getName() );
+ setProperty( properties, "org.url", versionMetadata.getOrganization().getUrl() );
+ }
+ int i = 0;
+ for ( License license : versionMetadata.getLicenses() )
+ {
+ setProperty( properties, "license." + i + ".name", license.getName() );
+ setProperty( properties, "license." + i + ".url", license.getUrl() );
+ i++;
+ }
+ i = 0;
+ for ( MailingList mailingList : versionMetadata.getMailingLists() )
+ {
+ setProperty( properties, "mailingList." + i + ".archive", mailingList.getMainArchiveUrl() );
+ setProperty( properties, "mailingList." + i + ".name", mailingList.getName() );
+ setProperty( properties, "mailingList." + i + ".post", mailingList.getPostAddress() );
+ setProperty( properties, "mailingList." + i + ".unsubscribe", mailingList.getUnsubscribeAddress() );
+ setProperty( properties, "mailingList." + i + ".subscribe", mailingList.getSubscribeAddress() );
+ setProperty( properties, "mailingList." + i + ".otherArchives", join( mailingList.getOtherArchives() ) );
+ i++;
+ }
+ i = 0;
+ for ( Dependency dependency : versionMetadata.getDependencies() )
+ {
+ setProperty( properties, "dependency." + i + ".classifier", dependency.getClassifier() );
+ setProperty( properties, "dependency." + i + ".scope", dependency.getScope() );
+ setProperty( properties, "dependency." + i + ".systemPath", dependency.getSystemPath() );
+ setProperty( properties, "dependency." + i + ".artifactId", dependency.getArtifactId() );
+ setProperty( properties, "dependency." + i + ".groupId", dependency.getGroupId() );
+ setProperty( properties, "dependency." + i + ".version", dependency.getVersion() );
+ setProperty( properties, "dependency." + i + ".type", dependency.getType() );
+ i++;
+ }
+ Set<String> facetIds = new LinkedHashSet<String>( versionMetadata.getFacetIds() );
+ facetIds.addAll( Arrays.asList( properties.getProperty( "facetIds", "" ).split( "," ) ) );
+ properties.setProperty( "facetIds", join( facetIds ) );
+
+ for ( MetadataFacet facet : versionMetadata.getFacetList() )
+ {
+ properties.putAll( facet.toProperties() );
+ }
+
+ try
+ {
+ writeProperties( properties, directory, PROJECT_VERSION_METADATA_KEY );
+ }
+ catch ( IOException e )
+ {
+ // TODO
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ }
+
+ public void updateProjectReference( String repoId, String namespace, String projectId, String projectVersion,
+ ProjectVersionReference reference )
+ {
+ File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+ int i = Integer.valueOf( properties.getProperty( "ref:lastReferenceNum", "-1" ) ) + 1;
+ setProperty( properties, "ref:lastReferenceNum", Integer.toString( i ) );
+ setProperty( properties, "ref:reference." + i + ".namespace", reference.getNamespace() );
+ setProperty( properties, "ref:reference." + i + ".projectId", reference.getProjectId() );
+ setProperty( properties, "ref:reference." + i + ".projectVersion", reference.getProjectVersion() );
+ setProperty( properties, "ref:reference." + i + ".referenceType", reference.getReferenceType().toString() );
+
+ try
+ {
+ writeProperties( properties, directory, PROJECT_VERSION_METADATA_KEY );
+ }
+ catch ( IOException e )
+ {
+ // TODO
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ }
+
+ public void updateNamespace( String repoId, String namespace )
+ {
+ try
+ {
+ File namespaceDirectory = new File( this.directory, repoId + "/" + namespace );
+ Properties properties = new Properties();
+ properties.setProperty( "namespace", namespace );
+ writeProperties( properties, namespaceDirectory, NAMESPACE_METADATA_KEY );
+
+ }
+ catch ( IOException e )
+ {
+ // TODO!
+ e.printStackTrace();
+ }
+ }
+
+ public List<String> getMetadataFacets( String repoId, String facetId )
+ {
+ File directory = getMetadataDirectory( repoId, facetId );
+ List<String> facets = new ArrayList<String>();
+ recurse( facets, "", directory );
+ return facets;
+ }
+
+ private void recurse( List<String> facets, String prefix, File directory )
+ {
+ File[] list = directory.listFiles();
+ if ( list != null )
+ {
+ for ( File dir : list )
+ {
+ if ( dir.isDirectory() )
+ {
+ recurse( facets, prefix + "/" + dir.getName(), dir );
+ }
+ else if ( dir.getName().equals( METADATA_KEY + ".properties" ) )
+ {
+ facets.add( prefix.substring( 1 ) );
+ }
+ }
+ }
+ }
+
+ public MetadataFacet getMetadataFacet( String repositoryId, String facetId, String name )
+ {
+ Properties properties;
+ try
+ {
+ properties =
+ readProperties( new File( getMetadataDirectory( repositoryId, facetId ), name ), METADATA_KEY );
+ }
+ catch ( FileNotFoundException e )
+ {
+ return null;
+ }
+ catch ( IOException e )
+ {
+ // TODO
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ return null;
+ }
+ MetadataFacet metadataFacet = null;
+ MetadataFacetFactory metadataFacetFactory = metadataFacetFactories.get( facetId );
+ if ( metadataFacetFactory != null )
+ {
++ metadataFacet = metadataFacetFactory.createMetadataFacet( repositoryId, name );
+ Map<String, String> map = new HashMap<String, String>();
+ for ( String key : properties.stringPropertyNames() )
+ {
+ map.put( key, properties.getProperty( key ) );
+ }
+ metadataFacet.fromProperties( map );
+ }
+ return metadataFacet;
+ }
+
+ public void addMetadataFacet( String repositoryId, MetadataFacet metadataFacet )
+ {
+ Properties properties = new Properties();
+ properties.putAll( metadataFacet.toProperties() );
+
+ try
+ {
+ File directory =
+ new File( getMetadataDirectory( repositoryId, metadataFacet.getFacetId() ), metadataFacet.getName() );
+ writeProperties( properties, directory, METADATA_KEY );
+ }
+ catch ( IOException e )
+ {
+ // TODO!
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ }
+
+ public void removeMetadataFacets( String repositoryId, String facetId )
+ {
+ try
+ {
+ FileUtils.deleteDirectory( getMetadataDirectory( repositoryId, facetId ) );
+ }
+ catch ( IOException e )
+ {
+ // TODO!
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ }
+
+ public void removeMetadataFacet( String repoId, String facetId, String name )
+ {
+ File dir = new File( getMetadataDirectory( repoId, facetId ), name );
+ try
+ {
+ FileUtils.deleteDirectory( dir );
+ }
+ catch ( IOException e )
+ {
+ // TODO
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ }
+
+ public List<ArtifactMetadata> getArtifactsByDateRange( String repoId, Date startTime, Date endTime )
+ {
+ // TODO: this is quite slow - if we are to persist with this repository implementation we should build an index
+ // of this information (eg. in Lucene, as before)
+
+ List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>();
+ for ( String ns : getRootNamespaces( repoId ) )
+ {
+ getArtifactsByDateRange( artifacts, repoId, ns, startTime, endTime );
+ }
+ return artifacts;
+ }
+
+ private void getArtifactsByDateRange( List<ArtifactMetadata> artifacts, String repoId, String ns, Date startTime,
+ Date endTime )
+ {
+ for ( String namespace : getNamespaces( repoId, ns ) )
+ {
+ getArtifactsByDateRange( artifacts, repoId, ns + "." + namespace, startTime, endTime );
+ }
+
+ for ( String project : getProjects( repoId, ns ) )
+ {
+ for ( String version : getProjectVersions( repoId, ns, project ) )
+ {
+ for ( ArtifactMetadata artifact : getArtifacts( repoId, ns, project, version ) )
+ {
+ if ( startTime == null || startTime.before( artifact.getWhenGathered() ) )
+ {
+ if ( endTime == null || endTime.after( artifact.getWhenGathered() ) )
+ {
+ artifacts.add( artifact );
+ }
+ }
+ }
+ }
+ }
+ }
+
+ public Collection<ArtifactMetadata> getArtifacts( String repoId, String namespace, String projectId,
+ String projectVersion )
+ {
+ Map<String, ArtifactMetadata> artifacts = new HashMap<String, ArtifactMetadata>();
+
+ File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+
+ for ( Map.Entry entry : properties.entrySet() )
+ {
+ String name = (String) entry.getKey();
+ StringTokenizer tok = new StringTokenizer( name, ":" );
+ if ( tok.hasMoreTokens() && "artifact".equals( tok.nextToken() ) )
+ {
+ String field = tok.nextToken();
+ String id = tok.nextToken();
+
+ ArtifactMetadata artifact = artifacts.get( id );
+ if ( artifact == null )
+ {
+ artifact = new ArtifactMetadata();
+ artifact.setRepositoryId( repoId );
+ artifact.setNamespace( namespace );
+ artifact.setProject( projectId );
+ artifact.setVersion( projectVersion );
+ artifact.setId( id );
+ artifacts.put( id, artifact );
+ }
+
+ String value = (String) entry.getValue();
+ if ( "updated".equals( field ) )
+ {
+ artifact.setFileLastModified( Long.valueOf( value ) );
+ }
+ else if ( "size".equals( field ) )
+ {
+ artifact.setSize( Long.valueOf( value ) );
+ }
+ else if ( "whenGathered".equals( field ) )
+ {
+ artifact.setWhenGathered( new Date( Long.valueOf( value ) ) );
+ }
+ else if ( "version".equals( field ) )
+ {
+ artifact.setVersion( value );
+ }
+ else if ( "md5".equals( field ) )
+ {
+ artifact.setMd5( value );
+ }
+ else if ( "sha1".equals( field ) )
+ {
+ artifact.setSha1( value );
+ }
+ }
+ }
+ return artifacts.values();
+ }
+
+ public Collection<String> getRepositories()
+ {
+ String[] repoIds = this.directory.list();
+ return repoIds != null ? Arrays.asList( repoIds ) : Collections.<String>emptyList();
+ }
+
+ public List<ArtifactMetadata> getArtifactsByChecksum( String repositoryId, String checksum )
+ {
+ // TODO: this is quite slow - if we are to persist with this repository implementation we should build an index
+ // of this information (eg. in Lucene, as before)
+ // alternatively, we could build a referential tree in the content repository, however it would need some levels
+ // of depth to avoid being too broad to be useful (eg. /repository/checksums/a/ab/abcdef1234567)
+
+ List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>();
+ for ( String ns : getRootNamespaces( repositoryId ) )
+ {
+ getArtifactsByChecksum( artifacts, repositoryId, ns, checksum );
+ }
+ return artifacts;
+ }
+
+ public void deleteArtifact( String repositoryId, String namespace, String project, String version, String id )
+ {
+ File directory = new File( this.directory, repositoryId + "/" + namespace + "/" + project + "/" + version );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+
+ properties.remove( "artifact:updated:" + id );
+ properties.remove( "artifact:whenGathered:" + id );
+ properties.remove( "artifact:size:" + id );
+ properties.remove( "artifact:md5:" + id );
+ properties.remove( "artifact:sha1:" + id );
+ properties.remove( "artifact:version:" + id );
+
+ try
+ {
+ writeProperties( properties, directory, PROJECT_VERSION_METADATA_KEY );
+ }
+ catch ( IOException e )
+ {
+ // TODO
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ }
+
+ public void deleteRepository( String repoId )
+ {
+ File directory = new File( this.directory, repoId );
+
+ try
+ {
+ FileUtils.deleteDirectory( directory );
+ }
+ catch ( IOException e )
+ {
+ // TODO
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ }
+
+ private void getArtifactsByChecksum( List<ArtifactMetadata> artifacts, String repositoryId, String ns,
+ String checksum )
+ {
+ for ( String namespace : getNamespaces( repositoryId, ns ) )
+ {
+ getArtifactsByChecksum( artifacts, repositoryId, ns + "." + namespace, checksum );
+ }
+
+ for ( String project : getProjects( repositoryId, ns ) )
+ {
+ for ( String version : getProjectVersions( repositoryId, ns, project ) )
+ {
+ for ( ArtifactMetadata artifact : getArtifacts( repositoryId, ns, project, version ) )
+ {
+ if ( checksum.equals( artifact.getMd5() ) || checksum.equals( artifact.getSha1() ) )
+ {
+ artifacts.add( artifact );
+ }
+ }
+ }
+ }
+ }
+
+ private File getMetadataDirectory( String repositoryId, String facetId )
+ {
+ return new File( this.directory, repositoryId + "/.meta/" + facetId );
+ }
+
+ private String join( Collection<String> ids )
+ {
+ if ( !ids.isEmpty() )
+ {
+ StringBuilder s = new StringBuilder();
+ for ( String id : ids )
+ {
+ s.append( id );
+ s.append( "," );
+ }
+ return s.substring( 0, s.length() - 1 );
+ }
+ return "";
+ }
+
+ private void setProperty( Properties properties, String name, String value )
+ {
+ if ( value != null )
+ {
+ properties.setProperty( name, value );
+ }
+ }
+
+ public void updateArtifact( String repoId, String namespace, String projectId, String projectVersion,
+ ArtifactMetadata artifact )
+ {
+ File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+
+ properties.setProperty( "artifact:updated:" + artifact.getId(),
+ Long.toString( artifact.getFileLastModified().getTime() ) );
+ properties.setProperty( "artifact:whenGathered:" + artifact.getId(),
+ Long.toString( artifact.getWhenGathered().getTime() ) );
+ properties.setProperty( "artifact:size:" + artifact.getId(), Long.toString( artifact.getSize() ) );
+ if ( artifact.getMd5() != null )
+ {
+ properties.setProperty( "artifact:md5:" + artifact.getId(), artifact.getMd5() );
+ }
+ if ( artifact.getSha1() != null )
+ {
+ properties.setProperty( "artifact:sha1:" + artifact.getId(), artifact.getSha1() );
+ }
+ properties.setProperty( "artifact:version:" + artifact.getId(), artifact.getVersion() );
+
+ try
+ {
+ writeProperties( properties, directory, PROJECT_VERSION_METADATA_KEY );
+ }
+ catch ( IOException e )
+ {
+ // TODO
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ }
+
+ private Properties readOrCreateProperties( File directory, String propertiesKey )
+ {
+ try
+ {
+ return readProperties( directory, propertiesKey );
+ }
+ catch ( FileNotFoundException e )
+ {
+ // ignore and return new properties
+ }
+ catch ( IOException e )
+ {
+ // TODO
+ e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
+ }
+ return new Properties();
+ }
+
+ private Properties readProperties( File directory, String propertiesKey )
+ throws IOException
+ {
+ Properties properties = new Properties();
+ FileInputStream in = null;
+ try
+ {
+ in = new FileInputStream( new File( directory, propertiesKey + ".properties" ) );
+ properties.load( in );
+ }
+ finally
+ {
+ IOUtils.closeQuietly( in );
+ }
+ return properties;
+ }
+
+ public ProjectMetadata getProject( String repoId, String namespace, String projectId )
+ {
+ File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+
+ ProjectMetadata project = new ProjectMetadata();
+ project.setNamespace( properties.getProperty( "namespace" ) );
+ project.setId( properties.getProperty( "id" ) );
+ return project;
+ }
+
+ public ProjectVersionMetadata getProjectVersion( String repoId, String namespace, String projectId,
+ String projectVersion )
+ {
+ File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+ String id = properties.getProperty( "id" );
+ ProjectVersionMetadata versionMetadata = null;
+ if ( id != null )
+ {
+ versionMetadata = new ProjectVersionMetadata();
+ versionMetadata.setId( id );
+ versionMetadata.setName( properties.getProperty( "name" ) );
+ versionMetadata.setDescription( properties.getProperty( "description" ) );
+ versionMetadata.setUrl( properties.getProperty( "url" ) );
+ versionMetadata.setIncomplete( Boolean.valueOf( properties.getProperty( "incomplete", "false" ) ) );
+
+ String scmConnection = properties.getProperty( "scm.connection" );
+ String scmDeveloperConnection = properties.getProperty( "scm.developerConnection" );
+ String scmUrl = properties.getProperty( "scm.url" );
+ if ( scmConnection != null || scmDeveloperConnection != null || scmUrl != null )
+ {
+ Scm scm = new Scm();
+ scm.setConnection( scmConnection );
+ scm.setDeveloperConnection( scmDeveloperConnection );
+ scm.setUrl( scmUrl );
+ versionMetadata.setScm( scm );
+ }
+
+ String ciSystem = properties.getProperty( "ci.system" );
+ String ciUrl = properties.getProperty( "ci.url" );
+ if ( ciSystem != null || ciUrl != null )
+ {
+ CiManagement ci = new CiManagement();
+ ci.setSystem( ciSystem );
+ ci.setUrl( ciUrl );
+ versionMetadata.setCiManagement( ci );
+ }
+
+ String issueSystem = properties.getProperty( "issue.system" );
+ String issueUrl = properties.getProperty( "issue.url" );
+ if ( issueSystem != null || issueUrl != null )
+ {
+ IssueManagement issueManagement = new IssueManagement();
+ issueManagement.setSystem( issueSystem );
+ issueManagement.setUrl( issueUrl );
+ versionMetadata.setIssueManagement( issueManagement );
+ }
+
+ String orgName = properties.getProperty( "org.name" );
+ String orgUrl = properties.getProperty( "org.url" );
+ if ( orgName != null || orgUrl != null )
+ {
+ Organization org = new Organization();
+ org.setName( orgName );
+ org.setUrl( orgUrl );
+ versionMetadata.setOrganization( org );
+ }
+
+ boolean done = false;
+ int i = 0;
+ while ( !done )
+ {
+ String licenseName = properties.getProperty( "license." + i + ".name" );
+ String licenseUrl = properties.getProperty( "license." + i + ".url" );
+ if ( licenseName != null || licenseUrl != null )
+ {
+ License license = new License();
+ license.setName( licenseName );
+ license.setUrl( licenseUrl );
+ versionMetadata.addLicense( license );
+ }
+ else
+ {
+ done = true;
+ }
+ i++;
+ }
+
+ done = false;
+ i = 0;
+ while ( !done )
+ {
+ String mailingListName = properties.getProperty( "mailingList." + i + ".name" );
+ if ( mailingListName != null )
+ {
+ MailingList mailingList = new MailingList();
+ mailingList.setName( mailingListName );
+ mailingList.setMainArchiveUrl( properties.getProperty( "mailingList." + i + ".archive" ) );
+ mailingList.setOtherArchives(
+ Arrays.asList( properties.getProperty( "mailingList." + i + ".otherArchives" ).split( "," ) ) );
+ mailingList.setPostAddress( properties.getProperty( "mailingList." + i + ".post" ) );
+ mailingList.setSubscribeAddress( properties.getProperty( "mailingList." + i + ".subscribe" ) );
+ mailingList.setUnsubscribeAddress( properties.getProperty( "mailingList." + i + ".unsubscribe" ) );
+ versionMetadata.addMailingList( mailingList );
+ }
+ else
+ {
+ done = true;
+ }
+ i++;
+ }
+
+ done = false;
+ i = 0;
+ while ( !done )
+ {
+ String dependencyArtifactId = properties.getProperty( "dependency." + i + ".artifactId" );
+ if ( dependencyArtifactId != null )
+ {
+ Dependency dependency = new Dependency();
+ dependency.setArtifactId( dependencyArtifactId );
+ dependency.setGroupId( properties.getProperty( "dependency." + i + ".groupId" ) );
+ dependency.setClassifier( properties.getProperty( "dependency." + i + ".classifier" ) );
+ dependency.setOptional(
+ Boolean.valueOf( properties.getProperty( "dependency." + i + ".optional" ) ) );
+ dependency.setScope( properties.getProperty( "dependency." + i + ".scope" ) );
+ dependency.setSystemPath( properties.getProperty( "dependency." + i + ".systemPath" ) );
+ dependency.setType( properties.getProperty( "dependency." + i + ".type" ) );
+ dependency.setVersion( properties.getProperty( "dependency." + i + ".version" ) );
+ versionMetadata.addDependency( dependency );
+ }
+ else
+ {
+ done = true;
+ }
+ i++;
+ }
+
+ String facetIds = properties.getProperty( "facetIds", "" );
+ if ( facetIds.length() > 0 )
+ {
+ for ( String facetId : facetIds.split( "," ) )
+ {
+ MetadataFacetFactory factory = metadataFacetFactories.get( facetId );
+ if ( factory == null )
+ {
+ log.error( "Attempted to load unknown metadata facet: " + facetId );
+ }
+ else
+ {
+ MetadataFacet facet = factory.createMetadataFacet();
+ Map<String, String> map = new HashMap<String, String>();
+ for ( String key : properties.stringPropertyNames() )
+ {
+ if ( key.startsWith( facet.getFacetId() ) )
+ {
+ map.put( key, properties.getProperty( key ) );
+ }
+ }
+ facet.fromProperties( map );
+ versionMetadata.addFacet( facet );
+ }
+ }
+ }
+
+ for ( MetadataFacet facet : versionMetadata.getFacetList() )
+ {
+ properties.putAll( facet.toProperties() );
+ }
+ }
+ return versionMetadata;
+ }
+
+ public Collection<String> getArtifactVersions( String repoId, String namespace, String projectId,
+ String projectVersion )
+ {
+ File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+
+ Set<String> versions = new HashSet<String>();
+ for ( Map.Entry entry : properties.entrySet() )
+ {
+ String name = (String) entry.getKey();
+ if ( name.startsWith( "artifact:version:" ) )
+ {
+ versions.add( (String) entry.getValue() );
+ }
+ }
+ return versions;
+ }
+
+ public Collection<ProjectVersionReference> getProjectReferences( String repoId, String namespace, String projectId,
+ String projectVersion )
+ {
+ File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId + "/" + projectVersion );
+
+ Properties properties = readOrCreateProperties( directory, PROJECT_VERSION_METADATA_KEY );
+ int numberOfRefs = Integer.valueOf( properties.getProperty( "ref:lastReferenceNum", "-1" ) ) + 1;
+
+ List<ProjectVersionReference> references = new ArrayList<ProjectVersionReference>();
+ for ( int i = 0; i < numberOfRefs; i++ )
+ {
+ ProjectVersionReference reference = new ProjectVersionReference();
+ reference.setProjectId( properties.getProperty( "ref:reference." + i + ".projectId" ) );
+ reference.setNamespace( properties.getProperty( "ref:reference." + i + ".namespace" ) );
+ reference.setProjectVersion( properties.getProperty( "ref:reference." + i + ".projectVersion" ) );
+ reference.setReferenceType( ProjectVersionReference.ReferenceType.valueOf(
+ properties.getProperty( "ref:reference." + i + ".referenceType" ) ) );
+ references.add( reference );
+ }
+ return references;
+ }
+
+ public Collection<String> getRootNamespaces( String repoId )
+ {
+ return getNamespaces( repoId, null );
+ }
+
+ public Collection<String> getNamespaces( String repoId, String baseNamespace )
+ {
+ List<String> allNamespaces = new ArrayList<String>();
+ File directory = new File( this.directory, repoId );
+ File[] files = directory.listFiles();
+ if ( files != null )
+ {
+ for ( File namespace : files )
+ {
+ if ( new File( namespace, NAMESPACE_METADATA_KEY + ".properties" ).exists() )
+ {
+ allNamespaces.add( namespace.getName() );
+ }
+ }
+ }
+
+ Set<String> namespaces = new LinkedHashSet<String>();
+ int fromIndex = baseNamespace != null ? baseNamespace.length() + 1 : 0;
+ for ( String namespace : allNamespaces )
+ {
+ if ( baseNamespace == null || namespace.startsWith( baseNamespace + "." ) )
+ {
+ int i = namespace.indexOf( '.', fromIndex );
+ if ( i >= 0 )
+ {
+ namespaces.add( namespace.substring( fromIndex, i ) );
+ }
+ else
+ {
+ namespaces.add( namespace.substring( fromIndex ) );
+ }
+ }
+ }
+ return new ArrayList<String>( namespaces );
+ }
+
+ public Collection<String> getProjects( String repoId, String namespace )
+ {
+ List<String> projects = new ArrayList<String>();
+ File directory = new File( this.directory, repoId + "/" + namespace );
+ File[] files = directory.listFiles();
+ if ( files != null )
+ {
+ for ( File project : files )
+ {
+ if ( new File( project, PROJECT_METADATA_KEY + ".properties" ).exists() )
+ {
+ projects.add( project.getName() );
+ }
+ }
+ }
+ return projects;
+ }
+
+ public Collection<String> getProjectVersions( String repoId, String namespace, String projectId )
+ {
+ List<String> projectVersions = new ArrayList<String>();
+ File directory = new File( this.directory, repoId + "/" + namespace + "/" + projectId );
+ File[] files = directory.listFiles();
+ if ( files != null )
+ {
+ for ( File projectVersion : files )
+ {
+ if ( new File( projectVersion, PROJECT_VERSION_METADATA_KEY + ".properties" ).exists() )
+ {
+ projectVersions.add( projectVersion.getName() );
+ }
+ }
+ }
+ return projectVersions;
+ }
+
+ private void writeProperties( Properties properties, File directory, String propertiesKey )
+ throws IOException
+ {
+ directory.mkdirs();
+ FileOutputStream os = new FileOutputStream( new File( directory, propertiesKey + ".properties" ) );
+ try
+ {
+ properties.store( os, null );
+ }
+ finally
+ {
+ IOUtils.closeQuietly( os );
+ }
+ }
+
+ public void setDirectory( File directory )
+ {
+ this.directory = directory;
+ }
+
+ public void setMetadataFacetFactories( Map<String, MetadataFacetFactory> metadataFacetFactories )
+ {
+ this.metadataFacetFactories = metadataFacetFactories;
+ }
+}
--- /dev/null
- return new TestMetadataFacet( "test-metadata" );
+package org.apache.archiva.metadata.repository.file;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.model.MailingList;
+import org.apache.archiva.metadata.model.MetadataFacet;
+import org.apache.archiva.metadata.model.MetadataFacetFactory;
+import org.apache.archiva.metadata.model.ProjectMetadata;
+import org.apache.archiva.metadata.model.ProjectVersionMetadata;
+import org.apache.commons.io.FileUtils;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+
+/**
+ * @todo should this be a generic MetadataRepository implementation test?
+ */
+public class FileMetadataRepositoryTest
+ extends PlexusInSpringTestCase
+{
+ private FileMetadataRepository repository;
+
+ private static final String TEST_REPO_ID = "test";
+
+ private static final String TEST_PROJECT = "projectId";
+
+ private static final String TEST_NAMESPACE = "namespace";
+
+ private static final String TEST_PROJECT_VERSION = "1.0";
+
+ private static final String TEST_FACET_ID = "test-facet-id";
+
+ private static final String TEST_NAME = "test/name";
+
+ private static final String TEST_VALUE = "test-value";
+
+ private static final String UNKNOWN = "unknown";
+
+ private static final String OTHER_REPO = "other-repo";
+
+ private static final String TEST_MD5 = "bd4a9b642562547754086de2dab26b7d";
+
+ private static final String TEST_SHA1 = "2e5daf0201ddeb068a62d5e08da18657ab2c6be9";
+
++ private static final String TEST_METADATA_VALUE = "test-metadata";
++
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ repository = new FileMetadataRepository();
+ File directory = getTestFile( "target/test-repository" );
+ FileUtils.deleteDirectory( directory );
+ repository.setDirectory( directory );
+
+ Map<String, MetadataFacetFactory> factories = new HashMap<String, MetadataFacetFactory>();
+ factories.put( TEST_FACET_ID, new MetadataFacetFactory()
+ {
+ public MetadataFacet createMetadataFacet()
+ {
- return new TestMetadataFacet( "", "test-value" );
++ return new TestMetadataFacet( TEST_METADATA_VALUE );
++ }
++
++ public MetadataFacet createMetadataFacet( String repositoryId, String name )
++ {
++ return new TestMetadataFacet( TEST_METADATA_VALUE );
+ }
+ } );
+
+ // add to ensure we don't accidentally create an empty facet ID.
+ factories.put( "", new MetadataFacetFactory()
+ {
+ public MetadataFacet createMetadataFacet()
+ {
- assertEquals( new TestMetadataFacet( "test-metadata" ),
++ return new TestMetadataFacet( "", TEST_VALUE );
++ }
++
++ public MetadataFacet createMetadataFacet( String repositoryId, String name )
++ {
++ return new TestMetadataFacet( "", TEST_VALUE );
+ }
+ } );
+ repository.setMetadataFacetFactories( factories );
+ }
+
+ public void testRootNamespaceWithNoMetadataRepository()
+ {
+ Collection<String> namespaces = repository.getRootNamespaces( TEST_REPO_ID );
+ assertEquals( Collections.<String>emptyList(), namespaces );
+ }
+
+ public void testUpdateProjectVersionMetadataWithNoOtherArchives()
+ {
+ ProjectVersionMetadata metadata = new ProjectVersionMetadata();
+ metadata.setId( TEST_PROJECT_VERSION );
+ MailingList mailingList = new MailingList();
+ mailingList.setName( "Foo List" );
+ mailingList.setOtherArchives( Collections.<String>emptyList() );
+ metadata.setMailingLists( Collections.singletonList( mailingList ) );
+ repository.updateProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
+ }
+
+ public void testUpdateProjectVersionMetadataIncomplete()
+ {
+ ProjectVersionMetadata metadata = new ProjectVersionMetadata();
+ metadata.setId( TEST_PROJECT_VERSION );
+ metadata.setIncomplete( true );
+ repository.updateProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
+
+ metadata = repository.getProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
+ assertEquals( true, metadata.isIncomplete() );
+ }
+
+ public void testUpdateProjectVersionMetadataWithExistingFacets()
+ {
+ ProjectVersionMetadata metadata = new ProjectVersionMetadata();
+ metadata.setId( TEST_PROJECT_VERSION );
+ MetadataFacet facet = new TestMetadataFacet( "baz" );
+ metadata.addFacet( facet );
+ repository.updateProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
+
+ metadata = repository.getProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
+ assertEquals( Collections.singleton( TEST_FACET_ID ), metadata.getFacetIds() );
+
+ metadata = new ProjectVersionMetadata();
+ metadata.setId( TEST_PROJECT_VERSION );
+ repository.updateProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
+
+ metadata = repository.getProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
+ assertEquals( Collections.singleton( TEST_FACET_ID ), metadata.getFacetIds() );
+ TestMetadataFacet testFacet = (TestMetadataFacet) metadata.getFacet( TEST_FACET_ID );
+ assertEquals( "baz", testFacet.getValue() );
+ }
+
+ public void testUpdateProjectVersionMetadataWithNoExistingFacets()
+ {
+ ProjectVersionMetadata metadata = new ProjectVersionMetadata();
+ metadata.setId( TEST_PROJECT_VERSION );
+ repository.updateProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
+
+ metadata = repository.getProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
+ assertEquals( Collections.<String>emptyList(), new ArrayList<String>( metadata.getFacetIds() ) );
+
+ metadata = new ProjectVersionMetadata();
+ metadata.setId( TEST_PROJECT_VERSION );
+ repository.updateProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, metadata );
+
+ metadata = repository.getProjectVersion( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION );
+ assertEquals( Collections.<String>emptyList(), new ArrayList<String>( metadata.getFacetIds() ) );
+ }
+
+ public void testGetMetadataFacet()
+ {
+ repository.addMetadataFacet( TEST_REPO_ID, new TestMetadataFacet( TEST_VALUE ) );
+
+ assertEquals( new TestMetadataFacet( TEST_VALUE ),
+ repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
+ }
+
+ public void testGetMetadataFacetWhenEmpty()
+ {
+ assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
+ }
+
+ public void testGetMetadataFacetWhenUnknownName()
+ {
+ repository.addMetadataFacet( TEST_REPO_ID, new TestMetadataFacet( TEST_VALUE ) );
+
+ assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, UNKNOWN ) );
+ }
+
+ public void testGetMetadataFacetWhenDefaultValue()
+ {
+ repository.addMetadataFacet( TEST_REPO_ID, new TestMetadataFacet( null ) );
+
++ assertEquals( new TestMetadataFacet( TEST_METADATA_VALUE ),
+ repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
+ }
+
+ public void testGetMetadataFacetWhenUnknownFacetId()
+ {
+ assertNull( repository.getMetadataFacet( TEST_REPO_ID, UNKNOWN, TEST_NAME ) );
+ }
+
+ public void testGetMetadataFacets()
+ {
+ repository.addMetadataFacet( TEST_REPO_ID, new TestMetadataFacet( TEST_VALUE ) );
+
+ assertEquals( Collections.singletonList( TEST_NAME ),
+ repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID ) );
+ }
+
+ public void testGetMetadataFacetsWhenEmpty()
+ {
+ List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertTrue( facets.isEmpty() );
+ }
+
+ public void testRemoveFacets()
+ {
+ repository.addMetadataFacet( TEST_REPO_ID, new TestMetadataFacet( TEST_VALUE ) );
+
+ List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertFalse( facets.isEmpty() );
+
+ repository.removeMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+
+ facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertTrue( facets.isEmpty() );
+ }
+
+ public void testRemoveFacetsWhenEmpty()
+ {
+ List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertTrue( facets.isEmpty() );
+
+ repository.removeMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+
+ facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertTrue( facets.isEmpty() );
+ }
+
+ public void testRemoveFacetsWhenUnknown()
+ {
+ repository.removeMetadataFacets( TEST_REPO_ID, UNKNOWN );
+ }
+
+ public void testRemoveFacet()
+ {
+ TestMetadataFacet metadataFacet = new TestMetadataFacet( TEST_VALUE );
+ repository.addMetadataFacet( TEST_REPO_ID, metadataFacet );
+
+ assertEquals( metadataFacet, repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
+ List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertFalse( facets.isEmpty() );
+
+ repository.removeMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME );
+
+ assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
+ facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertTrue( facets.isEmpty() );
+ }
+
+ public void testRemoveFacetWhenEmpty()
+ {
+ List<String> facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertTrue( facets.isEmpty() );
+ assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
+
+ repository.removeMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME );
+
+ facets = repository.getMetadataFacets( TEST_REPO_ID, TEST_FACET_ID );
+ assertTrue( facets.isEmpty() );
+ assertNull( repository.getMetadataFacet( TEST_REPO_ID, TEST_FACET_ID, TEST_NAME ) );
+ }
+
+ public void testRemoveFacetWhenUnknown()
+ {
+ repository.removeMetadataFacet( TEST_REPO_ID, UNKNOWN, TEST_NAME );
+ }
+
+ public void testGetArtifacts()
+ {
+ ArtifactMetadata artifact1 = createArtifact();
+ ArtifactMetadata artifact2 = createArtifact( "pom" );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact1 );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact2 );
+
+ assertEquals( Arrays.asList( artifact2, artifact1 ), new ArrayList<ArtifactMetadata>(
+ repository.getArtifacts( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION ) ) );
+ }
+
+ public void testGetArtifactVersions()
+ {
+ ArtifactMetadata artifact1 = createArtifact();
+ String version1 = "1.0-20091212.012345-1";
+ artifact1.setId( artifact1.getProject() + "-" + version1 + ".jar" );
+ artifact1.setVersion( version1 );
+ ArtifactMetadata artifact2 = createArtifact();
+ String version2 = "1.0-20091212.123456-2";
+ artifact2.setId( artifact2.getProject() + "-" + version2 + ".jar" );
+ artifact2.setVersion( version2 );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact1 );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact2 );
+
+ assertEquals( new HashSet<String>( Arrays.asList( version2, version1 ) ),
+ repository.getArtifactVersions( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT,
+ TEST_PROJECT_VERSION ) );
+ }
+
+ public void testGetArtifactVersionsMultipleArtifactsSingleVersion()
+ {
+ ArtifactMetadata artifact1 = createArtifact();
+ artifact1.setId( TEST_PROJECT + "-" + TEST_PROJECT_VERSION + ".jar" );
+ ArtifactMetadata artifact2 = createArtifact();
+ artifact2.setId( TEST_PROJECT + "-" + TEST_PROJECT_VERSION + "-sources.jar" );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact1 );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact2 );
+
+ assertEquals( Collections.singleton( TEST_PROJECT_VERSION ),
+ repository.getArtifactVersions( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT,
+ TEST_PROJECT_VERSION ) );
+ }
+
+ public void testRepositories()
+ {
+ repository.addMetadataFacet( TEST_REPO_ID, new TestMetadataFacet( TEST_VALUE ) );
+ repository.addMetadataFacet( OTHER_REPO, new TestMetadataFacet( TEST_VALUE ) );
+
+ assertEquals( Arrays.asList( OTHER_REPO, TEST_REPO_ID ), repository.getRepositories() );
+ }
+
+ public void testRepositoriesWhenEmpty()
+ {
+ assertTrue( repository.getRepositories().isEmpty() );
+ }
+
+ public void testGetArtifactsByDateRangeOpen()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ assertEquals( Collections.singletonList( artifact ),
+ repository.getArtifactsByDateRange( TEST_REPO_ID, null, null ) );
+ }
+
+ public void testGetArtifactsByDateRangeSparseNamespace()
+ {
+ String namespace = "org.apache.archiva";
+ repository.updateNamespace( TEST_REPO_ID, namespace );
+ repository.updateProject( TEST_REPO_ID, createProject( namespace ) );
+ ArtifactMetadata artifact = createArtifact();
+ artifact.setNamespace( namespace );
+ repository.updateArtifact( TEST_REPO_ID, namespace, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ assertEquals( Collections.singletonList( artifact ),
+ repository.getArtifactsByDateRange( TEST_REPO_ID, null, null ) );
+ }
+
+ public void testGetArtifactsByDateRangeLowerBound()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ Date date = new Date( artifact.getWhenGathered().getTime() - 10000 );
+ assertEquals( Collections.singletonList( artifact ),
+ repository.getArtifactsByDateRange( TEST_REPO_ID, date, null ) );
+ }
+
+ public void testGetArtifactsByDateRangeLowerBoundOutOfRange()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ Date date = new Date( artifact.getWhenGathered().getTime() + 10000 );
+ assertTrue( repository.getArtifactsByDateRange( TEST_REPO_ID, date, null ).isEmpty() );
+ }
+
+ public void testGetArtifactsByDateRangeLowerAndUpperBound()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ Date lower = new Date( artifact.getWhenGathered().getTime() - 10000 );
+ Date upper = new Date( artifact.getWhenGathered().getTime() + 10000 );
+ assertEquals( Collections.singletonList( artifact ),
+ repository.getArtifactsByDateRange( TEST_REPO_ID, lower, upper ) );
+ }
+
+ public void testGetArtifactsByDateRangeUpperBound()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ Date upper = new Date( artifact.getWhenGathered().getTime() + 10000 );
+ assertEquals( Collections.singletonList( artifact ),
+ repository.getArtifactsByDateRange( TEST_REPO_ID, null, upper ) );
+ }
+
+ public void testGetArtifactsByDateRangeUpperBoundOutOfRange()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ Date upper = new Date( artifact.getWhenGathered().getTime() - 10000 );
+ assertTrue( repository.getArtifactsByDateRange( TEST_REPO_ID, null, upper ).isEmpty() );
+ }
+
+ public void testGetNamespacesWithSparseDepth()
+ {
+ repository.updateNamespace( TEST_REPO_ID, "org.apache.maven.shared" );
+
+ assertEquals( Arrays.asList( "org" ), repository.getRootNamespaces( TEST_REPO_ID ) );
+ assertEquals( Arrays.asList( "apache" ), repository.getNamespaces( TEST_REPO_ID, "org" ) );
+ assertEquals( Arrays.asList( "maven" ), repository.getNamespaces( TEST_REPO_ID, "org.apache" ) );
+ assertEquals( Arrays.asList( "shared" ), repository.getNamespaces( TEST_REPO_ID, "org.apache.maven" ) );
+ }
+
+ public void testGetArtifactsByChecksumSingleResultMd5()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ assertEquals( Collections.singletonList( artifact ),
+ repository.getArtifactsByChecksum( TEST_REPO_ID, TEST_MD5 ) );
+ }
+
+ public void testGetArtifactsByChecksumSingleResultSha1()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ assertEquals( Collections.singletonList( artifact ),
+ repository.getArtifactsByChecksum( TEST_REPO_ID, TEST_SHA1 ) );
+ }
+
+ public void testGetArtifactsByChecksumMultipleResult()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+
+ ProjectMetadata projectMetadata = createProject();
+ repository.updateProject( TEST_REPO_ID, projectMetadata );
+ ArtifactMetadata artifact1 = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact1 );
+
+ projectMetadata = createProject();
+ String newProjectId = "another-project";
+ projectMetadata.setId( newProjectId );
+ repository.updateProject( TEST_REPO_ID, projectMetadata );
+ ArtifactMetadata artifact2 = createArtifact();
+ artifact2.setProject( newProjectId );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, newProjectId, TEST_PROJECT_VERSION, artifact2 );
+
+ assertEquals( Arrays.asList( artifact2, artifact1 ),
+ repository.getArtifactsByChecksum( TEST_REPO_ID, TEST_SHA1 ) );
+ }
+
+ public void testGetArtifactsByChecksumNoResult()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+ repository.updateProject( TEST_REPO_ID, createProject() );
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ assertEquals( Collections.<ArtifactMetadata>emptyList(),
+ repository.getArtifactsByChecksum( TEST_REPO_ID, "not a checksum" ) );
+ }
+
+ public void testDeleteArtifact()
+ {
+ ArtifactMetadata artifact = createArtifact();
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact );
+
+ assertEquals( Collections.singletonList( artifact ), new ArrayList<ArtifactMetadata>(
+ repository.getArtifacts( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION ) ) );
+
+ repository.deleteArtifact( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION, artifact.getId() );
+
+ assertTrue(
+ repository.getArtifacts( TEST_REPO_ID, TEST_NAMESPACE, TEST_PROJECT, TEST_PROJECT_VERSION ).isEmpty() );
+ }
+
+ public void testDeleteRepository()
+ {
+ repository.updateNamespace( TEST_REPO_ID, TEST_NAMESPACE );
+
+ ProjectMetadata project1 = new ProjectMetadata();
+ project1.setNamespace( TEST_NAMESPACE );
+ project1.setId( "project1" );
+ repository.updateProject( TEST_REPO_ID, project1 );
+ ProjectMetadata project2 = new ProjectMetadata();
+ project2.setNamespace( TEST_NAMESPACE );
+ project2.setId( "project2" );
+ repository.updateProject( TEST_REPO_ID, project2 );
+
+ ArtifactMetadata artifact1 = createArtifact();
+ artifact1.setProject( "project1" );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, "project1", TEST_PROJECT_VERSION, artifact1 );
+ ArtifactMetadata artifact2 = createArtifact();
+ artifact2.setProject( "project2" );
+ repository.updateArtifact( TEST_REPO_ID, TEST_NAMESPACE, "project2", TEST_PROJECT_VERSION, artifact2 );
+
+ assertEquals( Arrays.asList( artifact1, artifact2 ), new ArrayList<ArtifactMetadata>(
+ repository.getArtifactsByDateRange( TEST_REPO_ID, null, null ) ) );
+
+ repository.deleteRepository( TEST_REPO_ID );
+
+ assertTrue( repository.getArtifactsByDateRange( TEST_REPO_ID, null, null ).isEmpty() );
+ }
+
+ private ProjectMetadata createProject()
+ {
+ return createProject( TEST_NAMESPACE );
+ }
+
+ private ProjectMetadata createProject( String ns )
+ {
+ ProjectMetadata project = new ProjectMetadata();
+ project.setId( TEST_PROJECT );
+ project.setNamespace( ns );
+ return project;
+ }
+
+ private ArtifactMetadata createArtifact()
+ {
+ return createArtifact( "jar" );
+ }
+
+ private ArtifactMetadata createArtifact( String type )
+ {
+ ArtifactMetadata artifact = new ArtifactMetadata();
+ artifact.setId( TEST_PROJECT + "-" + TEST_PROJECT_VERSION + "." + type );
+ artifact.setWhenGathered( new Date() );
+ artifact.setNamespace( TEST_NAMESPACE );
+ artifact.setProject( TEST_PROJECT );
+ artifact.setRepositoryId( TEST_REPO_ID );
+ artifact.setFileLastModified( System.currentTimeMillis() );
+ artifact.setVersion( TEST_PROJECT_VERSION );
+ artifact.setMd5( TEST_MD5 );
+ artifact.setSha1( TEST_SHA1 );
+ return artifact;
+ }
+
+ private static class TestMetadataFacet
+ implements MetadataFacet
+ {
+ private String testFacetId;
+
+ private TestMetadataFacet( String value )
+ {
+ this.value = value;
+ testFacetId = TEST_FACET_ID;
+ }
+
+ private TestMetadataFacet( String facetId, String value )
+ {
+ this.value = value;
+ testFacetId = facetId;
+ }
+
+ private String value;
+
+ public String getFacetId()
+ {
+ return testFacetId;
+ }
+
+ public String getName()
+ {
+ return TEST_NAME;
+ }
+
+ public Map<String, String> toProperties()
+ {
+ if ( value != null )
+ {
+ return Collections.singletonMap( testFacetId + ":foo", value );
+ }
+ else
+ {
+ return Collections.emptyMap();
+ }
+ }
+
+ public void fromProperties( Map<String, String> properties )
+ {
+ String value = properties.get( testFacetId + ":foo" );
+ if ( value != null )
+ {
+ this.value = value;
+ }
+ }
+
+ public String getValue()
+ {
+ return value;
+ }
+
+ @Override
+ public String toString()
+ {
+ return "TestMetadataFacet{" + "value='" + value + '\'' + '}';
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( o == null || getClass() != o.getClass() )
+ {
+ return false;
+ }
+
+ TestMetadataFacet that = (TestMetadataFacet) o;
+
+ if ( value != null ? !value.equals( that.value ) : that.value != null )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return value != null ? value.hashCode() : 0;
+ }
+ }
+}
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ --><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>archiva-modules</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>plugins</artifactId>
+ <name>Archiva Core Plugins</name>
+ <packaging>pom</packaging>
+ <modules>
+ <module>metadata-repository-file</module>
+ <module>maven2-repository</module>
+ <module>repository-statistics</module>
+ <module>problem-reports</module>
++ <module>audit</module>
+ </modules>
+</project>
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>plugins</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>problem-reports</artifactId>
+ <name>Archiva Problem Reporting Plugin</name>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-repository-api</artifactId>
+ </dependency>
+ <!-- TODO: we want to move the event handling, or perhaps centralise under deleteArtifact() in the metadata repository itself -->
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-repository-layer</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-checksum</artifactId>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+package org.apache.archiva.reports;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.metadata.model.MetadataFacet;
+import org.apache.archiva.metadata.model.MetadataFacetFactory;
+
+/**
+ * @plexus.component role="org.apache.archiva.metadata.model.MetadataFacetFactory" role-hint="org.apache.archiva.reports"
+ */
+public class RepositoryProblemFacetFactory
+ implements MetadataFacetFactory
+{
+ public MetadataFacet createMetadataFacet()
+ {
+ return new RepositoryProblemFacet();
+ }
++
++ public MetadataFacet createMetadataFacet( String repositoryId, String name )
++ {
++ return new RepositoryProblemFacet();
++ }
+}
--- /dev/null
- <version>1.3-SNAPSHOT</version>
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>plugins</artifactId>
+ <groupId>org.apache.archiva</groupId>
++ <version>1.4-SNAPSHOT</version>
+ </parent>
+ <artifactId>repository-statistics</artifactId>
+ <name>Repository Statistics</name>
+ <!-- Plugin module that computes and stores repository scan statistics through the metadata repository API. -->
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-repository-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-spring</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>jcl-over-slf4j</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-repository-layer</artifactId>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
- Date date = RepositoryStatistics.SCAN_TIMESTAMP.parse( name );
+package org.apache.archiva.metadata.repository.stats;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.text.ParseException;
++import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.RepositoryContentFactory;
+import org.apache.maven.archiva.repository.RepositoryException;
+import org.apache.maven.archiva.repository.layout.LayoutException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Default statistics manager: stores each scan's results as a RepositoryStatistics
+ * metadata facet (named by scan-start timestamp) and computes aggregate counts by
+ * walking the metadata content repository.
+ *
+ * @plexus.component role="org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager" role-hint="default"
+ */
+public class DefaultRepositoryStatisticsManager
+ implements RepositoryStatisticsManager
+{
+ private static final Logger log = LoggerFactory.getLogger( DefaultRepositoryStatisticsManager.class );
+
+ /**
+ * @plexus.requirement
+ */
+ private MetadataRepository metadataRepository;
+
+ /**
+ * @plexus.requirement
+ */
+ private RepositoryContentFactory repositoryContentFactory;
+
+ // Facet names encode the scan start timestamp, so after sorting, the last name is
+ // the most recent scan. Returns null when the repository has never been scanned.
+ public RepositoryStatistics getLastStatistics( String repositoryId )
+ {
+ // TODO: consider a more efficient implementation that directly gets the last one from the content repository
+ List<String> scans = metadataRepository.getMetadataFacets( repositoryId, RepositoryStatistics.FACET_ID );
+ Collections.sort( scans );
+ if ( !scans.isEmpty() )
+ {
+ String name = scans.get( scans.size() - 1 );
+ return (RepositoryStatistics) metadataRepository.getMetadataFacet( repositoryId,
+ RepositoryStatistics.FACET_ID, name );
+ }
+ else
+ {
+ return null;
+ }
+ }
+
+ // Depth-first walk of the namespace tree rooted at ns, accumulating group, project,
+ // artifact, file-size and per-type counts into stats.
+ private void walkRepository( RepositoryStatistics stats, String repositoryId, String ns,
+ ManagedRepositoryContent repositoryContent )
+ {
+ for ( String namespace : metadataRepository.getNamespaces( repositoryId, ns ) )
+ {
+ walkRepository( stats, repositoryId, ns + "." + namespace, repositoryContent );
+ }
+
+ Collection<String> projects = metadataRepository.getProjects( repositoryId, ns );
+ if ( !projects.isEmpty() )
+ {
+ // a namespace only counts as a "group" when it directly contains projects
+ stats.setTotalGroupCount( stats.getTotalGroupCount() + 1 );
+ stats.setTotalProjectCount( stats.getTotalProjectCount() + projects.size() );
+
+ for ( String project : projects )
+ {
+ for ( String version : metadataRepository.getProjectVersions( repositoryId, ns, project ) )
+ {
+ for ( ArtifactMetadata artifact : metadataRepository.getArtifacts( repositoryId, ns, project,
+ version ) )
+ {
+ stats.setTotalArtifactCount( stats.getTotalArtifactCount() + 1 );
+ stats.setTotalArtifactFileSize( stats.getTotalArtifactFileSize() + artifact.getSize() );
+
+ // TODO: need a maven2 metadata repository API equivalent
+ try
+ {
+ String type = repositoryContent.toArtifactReference(
+ ns.replace( '.', '/' ) + "/" + project + "/" + version + "/" +
+ artifact.getId() ).getType();
+ stats.setTotalCountForType( type, stats.getTotalCountForType( type ) + 1 );
+ }
+ catch ( LayoutException e )
+ {
+ // ignore - artifacts not matching the repository layout have no type to count
+ }
+ }
+ }
+ }
+ }
+ }
+
+
+ // Records a new statistics facet for the completed scan, deriving repository totals
+ // by walking every root namespace of the metadata repository.
+ public void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles,
+ long newFiles )
+ {
+ RepositoryStatistics repositoryStatistics = new RepositoryStatistics();
+ repositoryStatistics.setScanStartTime( startTime );
+ repositoryStatistics.setScanEndTime( endTime );
+ repositoryStatistics.setTotalFileCount( totalFiles );
+ repositoryStatistics.setNewFileCount( newFiles );
+
+ // In the future, instead of being tied to a scan we might want to record information in the fly based on
+ // events that are occurring. Even without these totals we could query much of the information on demand based
+ // on information from the metadata content repository. In the mean time, we lock information in at scan time.
+ // Note that if new types are later discoverable due to a code change or new plugin, historical stats will not
+ // be updated and the repository will need to be rescanned.
+
+ long startWalk = System.currentTimeMillis();
+ // TODO: we can probably get a more efficient implementation directly from the metadata repository, but for now
+ // we just walk it. Alternatively, we could build an index, or store the aggregate information and update
+ // it on the fly
+ for ( String ns : metadataRepository.getRootNamespaces( repositoryId ) )
+ {
+ ManagedRepositoryContent content;
+ try
+ {
+ content = repositoryContentFactory.getManagedRepositoryContent( repositoryId );
+ }
+ catch ( RepositoryException e )
+ {
+ // NOTE(review): wraps a checked repository failure as an unchecked exception; callers cannot recover
+ throw new RuntimeException( e );
+ }
+ walkRepository( repositoryStatistics, repositoryId, ns, content );
+ }
+ log.info( "Repository walk for statistics executed in " + ( System.currentTimeMillis() - startWalk ) + "ms" );
+
+ metadataRepository.addMetadataFacet( repositoryId, repositoryStatistics );
+ }
+
+ // Removes every stored statistics facet for the repository.
+ public void deleteStatistics( String repositoryId )
+ {
+ metadataRepository.removeMetadataFacets( repositoryId, RepositoryStatistics.FACET_ID );
+ }
+
+ // Returns stats whose scan start time (parsed from the facet name) lies within
+ // [startTime, endTime]; a null bound is open-ended. Results are ordered newest-first.
+ public List<RepositoryStatistics> getStatisticsInRange( String repositoryId, Date startTime, Date endTime )
+ {
+ List<RepositoryStatistics> results = new ArrayList<RepositoryStatistics>();
+ List<String> list = metadataRepository.getMetadataFacets( repositoryId, RepositoryStatistics.FACET_ID );
+ Collections.sort( list, Collections.reverseOrder() );
+ for ( String name : list )
+ {
+ try
+ {
++ Date date = new SimpleDateFormat( RepositoryStatistics.SCAN_TIMESTAMP_FORMAT ).parse( name );
+ if ( ( startTime == null || !date.before( startTime ) ) &&
+ ( endTime == null || !date.after( endTime ) ) )
+ {
+ RepositoryStatistics stats =
+ (RepositoryStatistics) metadataRepository.getMetadataFacet( repositoryId,
+ RepositoryStatistics.FACET_ID,
+ name );
+ results.add( stats );
+ }
+ }
+ catch ( ParseException e )
+ {
+ log.error( "Invalid scan result found in the metadata repository: " + e.getMessage() );
+ // continue and ignore this one
+ }
+ }
+ return results;
+ }
+
+ // setter used by tests / manual wiring (plexus normally injects this)
+ public void setMetadataRepository( MetadataRepository metadataRepository )
+ {
+ this.metadataRepository = metadataRepository;
+ }
+
+ // setter used by tests / manual wiring (plexus normally injects this)
+ public void setRepositoryContentFactory( RepositoryContentFactory repositoryContentFactory )
+ {
+ this.repositoryContentFactory = repositoryContentFactory;
+ }
+}
--- /dev/null
- import java.text.DateFormat;
+package org.apache.archiva.metadata.repository.stats;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
- static final DateFormat SCAN_TIMESTAMP = new SimpleDateFormat( "yyyy/MM/dd/HHmmss.SSS" );
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.archiva.metadata.model.MetadataFacet;
+
+/**
+ * Metadata facet recording the outcome of one repository scan: start/end times plus
+ * aggregate file, artifact, project, group and per-type counts. The facet name is
+ * the scan start time rendered with SCAN_TIMESTAMP_FORMAT.
+ */
+public class RepositoryStatistics
+ implements MetadataFacet
+{
+ private Date scanEndTime;
+
+ private Date scanStartTime;
+
+ private long totalArtifactCount;
+
+ private long totalArtifactFileSize;
+
+ private long totalFileCount;
+
+ private long totalGroupCount;
+
+ private long totalProjectCount;
+
+ private long newFileCount;
+
+ // NOTE(review): public mutable static — consider declaring this "final".
+ public static String FACET_ID = "org.apache.archiva.metadata.repository.stats";
+
- return SCAN_TIMESTAMP.format( scanStartTime );
++ // Pattern for facet names. A new SimpleDateFormat is created per use because
++ // SimpleDateFormat instances are not thread-safe.
++ static final String SCAN_TIMESTAMP_FORMAT = "yyyy/MM/dd/HHmmss.SSS";
+
+ // artifact count per type label (e.g. "jar", "pom"), keyed by type
+ private Map<String, Long> totalCountForType = new HashMap<String, Long>();
+
+ public Date getScanEndTime()
+ {
+ return scanEndTime;
+ }
+
+ public void setScanEndTime( Date scanEndTime )
+ {
+ this.scanEndTime = scanEndTime;
+ }
+
+ public Date getScanStartTime()
+ {
+ return scanStartTime;
+ }
+
+ public void setScanStartTime( Date scanStartTime )
+ {
+ this.scanStartTime = scanStartTime;
+ }
+
+ public long getTotalArtifactCount()
+ {
+ return totalArtifactCount;
+ }
+
+ public void setTotalArtifactCount( long totalArtifactCount )
+ {
+ this.totalArtifactCount = totalArtifactCount;
+ }
+
+ public long getTotalArtifactFileSize()
+ {
+ return totalArtifactFileSize;
+ }
+
+ public void setTotalArtifactFileSize( long totalArtifactFileSize )
+ {
+ this.totalArtifactFileSize = totalArtifactFileSize;
+ }
+
+ public long getTotalFileCount()
+ {
+ return totalFileCount;
+ }
+
+ public void setTotalFileCount( long totalFileCount )
+ {
+ this.totalFileCount = totalFileCount;
+ }
+
+ public long getTotalGroupCount()
+ {
+ return totalGroupCount;
+ }
+
+ public void setTotalGroupCount( long totalGroupCount )
+ {
+ this.totalGroupCount = totalGroupCount;
+ }
+
+ public long getTotalProjectCount()
+ {
+ return totalProjectCount;
+ }
+
+ public void setTotalProjectCount( long totalProjectCount )
+ {
+ this.totalProjectCount = totalProjectCount;
+ }
+
+ public void setNewFileCount( long newFileCount )
+ {
+ this.newFileCount = newFileCount;
+ }
+
+ public long getNewFileCount()
+ {
+ return newFileCount;
+ }
+
+ // scan duration in milliseconds; assumes both scan times have been set
+ public long getDuration()
+ {
+ return scanEndTime.getTime() - scanStartTime.getTime();
+ }
+
+ public String getFacetId()
+ {
+ return FACET_ID;
+ }
+
+ // The facet name doubles as a sortable timestamp key for the scan.
+ public String getName()
+ {
++ return new SimpleDateFormat( SCAN_TIMESTAMP_FORMAT ).format( scanStartTime );
+ }
+
+ // Serializes all fields to a flat string map; per-type counts use "count-<type>" keys.
+ public Map<String, String> toProperties()
+ {
+ Map<String, String> properties = new HashMap<String, String>();
+ properties.put( "scanEndTime", String.valueOf( scanEndTime.getTime() ) );
+ properties.put( "scanStartTime", String.valueOf( scanStartTime.getTime() ) );
+ properties.put( "totalArtifactCount", String.valueOf( totalArtifactCount ) );
+ properties.put( "totalArtifactFileSize", String.valueOf( totalArtifactFileSize ) );
+ properties.put( "totalFileCount", String.valueOf( totalFileCount ) );
+ properties.put( "totalGroupCount", String.valueOf( totalGroupCount ) );
+ properties.put( "totalProjectCount", String.valueOf( totalProjectCount ) );
+ properties.put( "newFileCount", String.valueOf( newFileCount ) );
+ for ( Map.Entry<String, Long> entry : totalCountForType.entrySet() )
+ {
+ properties.put( "count-" + entry.getKey(), String.valueOf( entry.getValue() ) );
+ }
+ return properties;
+ }
+
+ // Inverse of toProperties(); assumes all non-prefixed keys are present and numeric.
+ public void fromProperties( Map<String, String> properties )
+ {
+ scanEndTime = new Date( Long.valueOf( properties.get( "scanEndTime" ) ) );
+ scanStartTime = new Date( Long.valueOf( properties.get( "scanStartTime" ) ) );
+ totalArtifactCount = Long.valueOf( properties.get( "totalArtifactCount" ) );
+ totalArtifactFileSize = Long.valueOf( properties.get( "totalArtifactFileSize" ) );
+ totalFileCount = Long.valueOf( properties.get( "totalFileCount" ) );
+ totalGroupCount = Long.valueOf( properties.get( "totalGroupCount" ) );
+ totalProjectCount = Long.valueOf( properties.get( "totalProjectCount" ) );
+ newFileCount = Long.valueOf( properties.get( "newFileCount" ) );
+ totalCountForType.clear();
+ for ( Map.Entry<String, String> entry : properties.entrySet() )
+ {
+ if ( entry.getKey().startsWith( "count-" ) )
+ {
+ // strip the "count-" prefix (6 chars) to recover the type key
+ totalCountForType.put( entry.getKey().substring( 6 ), Long.valueOf( entry.getValue() ) );
+ }
+ }
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( o == null || getClass() != o.getClass() )
+ {
+ return false;
+ }
+
+ RepositoryStatistics that = (RepositoryStatistics) o;
+
+ if ( newFileCount != that.newFileCount )
+ {
+ return false;
+ }
+ if ( totalArtifactCount != that.totalArtifactCount )
+ {
+ return false;
+ }
+ if ( totalArtifactFileSize != that.totalArtifactFileSize )
+ {
+ return false;
+ }
+ if ( totalFileCount != that.totalFileCount )
+ {
+ return false;
+ }
+ if ( totalGroupCount != that.totalGroupCount )
+ {
+ return false;
+ }
+ if ( totalProjectCount != that.totalProjectCount )
+ {
+ return false;
+ }
+ if ( !scanEndTime.equals( that.scanEndTime ) )
+ {
+ return false;
+ }
+ if ( !scanStartTime.equals( that.scanStartTime ) )
+ {
+ return false;
+ }
+ if ( !totalCountForType.equals( that.totalCountForType ) )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ int result = scanEndTime.hashCode();
+ result = 31 * result + scanStartTime.hashCode();
+ result = 31 * result + (int) ( totalArtifactCount ^ ( totalArtifactCount >>> 32 ) );
+ result = 31 * result + (int) ( totalArtifactFileSize ^ ( totalArtifactFileSize >>> 32 ) );
+ result = 31 * result + (int) ( totalFileCount ^ ( totalFileCount >>> 32 ) );
+ result = 31 * result + (int) ( totalGroupCount ^ ( totalGroupCount >>> 32 ) );
+ result = 31 * result + (int) ( totalProjectCount ^ ( totalProjectCount >>> 32 ) );
+ result = 31 * result + (int) ( newFileCount ^ ( newFileCount >>> 32 ) );
+ result = 31 * result + totalCountForType.hashCode();
+ return result;
+ }
+
+ @Override
+ public String toString()
+ {
+ return "RepositoryStatistics{" + "scanEndTime=" + scanEndTime + ", scanStartTime=" + scanStartTime +
+ ", totalArtifactCount=" + totalArtifactCount + ", totalArtifactFileSize=" + totalArtifactFileSize +
+ ", totalFileCount=" + totalFileCount + ", totalGroupCount=" + totalGroupCount + ", totalProjectCount=" +
+ totalProjectCount + ", newFileCount=" + newFileCount + ", totalCountForType=" + totalCountForType + '}';
+ }
+
+ public Map<String, Long> getTotalCountForType()
+ {
+ return totalCountForType;
+ }
+
+ // Returns 0 for an unknown type rather than null/NPE.
+ public long getTotalCountForType( String type )
+ {
+ Long value = totalCountForType.get( type );
+ return value != null ? value : 0;
+ }
+
+ public void setTotalCountForType( String type, long count )
+ {
+ totalCountForType.put( type, count );
+ }
+}
--- /dev/null
+package org.apache.archiva.metadata.repository.stats;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.metadata.model.MetadataFacet;
+import org.apache.archiva.metadata.model.MetadataFacetFactory;
+
+/**
+ * Factory that supplies empty RepositoryStatistics facets to the metadata repository.
+ *
+ * @plexus.component role="org.apache.archiva.metadata.model.MetadataFacetFactory" role-hint="org.apache.archiva.metadata.repository.stats"
+ */
+public class RepositoryStatisticsFactory
+ implements MetadataFacetFactory
+{
+ public MetadataFacet createMetadataFacet()
+ {
+ return new RepositoryStatistics();
+ }
++
++ // NOTE(review): repositoryId and name are unused — presumably the facet's identity is
++ // derived from its scan start time once fromProperties() runs; confirm against the
++ // MetadataFacetFactory contract.
++ public MetadataFacet createMetadataFacet( String repositoryId, String name )
++ {
++ return new RepositoryStatistics();
++ }
+}
--- /dev/null
- Date endTime = new Date( RepositoryStatistics.SCAN_TIMESTAMP.parse( SECOND_TEST_SCAN ).getTime() + 60000 );
+package org.apache.archiva.metadata.repository.stats;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.text.ParseException;
++import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import junit.framework.TestCase;
+import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.metadata.repository.MetadataRepository;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.repository.RepositoryContentFactory;
+import org.apache.maven.archiva.repository.content.ManagedDefaultRepositoryContent;
+import org.easymock.MockControl;
+import org.easymock.classextension.MockClassControl;
+
+public class RepositoryStatisticsManagerTest
+ extends TestCase
+{
+ private DefaultRepositoryStatisticsManager repositoryStatisticsManager;
+
+ private static final String TEST_REPO_ID = "test-repo";
+
+ private MockControl metadataRepositoryControl;
+
+ private MetadataRepository metadataRepository;
+
+ private static final String FIRST_TEST_SCAN = "2009/12/01/123456.789";
+
+ private static final String SECOND_TEST_SCAN = "2009/12/02/012345.678";
+
+ private Map<String, RepositoryStatistics> statsCreated = new LinkedHashMap<String, RepositoryStatistics>();
+
+ @Override
+ protected void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ repositoryStatisticsManager = new DefaultRepositoryStatisticsManager();
+
+ metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
+ metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
+ repositoryStatisticsManager.setMetadataRepository( metadataRepository );
+
+ ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
+ repository.setId( TEST_REPO_ID );
+ repository.setLocation( "" );
+ ManagedDefaultRepositoryContent content = new ManagedDefaultRepositoryContent();
+ content.setRepository( repository );
+ MockControl control = MockClassControl.createControl( RepositoryContentFactory.class );
+ RepositoryContentFactory contentFactory = (RepositoryContentFactory) control.getMock();
+ contentFactory.getManagedRepositoryContent( TEST_REPO_ID );
+ control.setDefaultReturnValue( content );
+ control.replay();
+ repositoryStatisticsManager.setRepositoryContentFactory( contentFactory );
+ }
+
+ public void testGetLatestStats()
+ throws ParseException
+ {
- stats.setScanStartTime( RepositoryStatistics.SCAN_TIMESTAMP.parse( SECOND_TEST_SCAN ) );
++ Date startTime = parseTimestamp( SECOND_TEST_SCAN );
++ Date endTime = new Date( startTime.getTime() + 60000 );
+
+ RepositoryStatistics stats = new RepositoryStatistics();
- assertEquals( SECOND_TEST_SCAN, RepositoryStatistics.SCAN_TIMESTAMP.format( stats.getScanStartTime() ) );
++ stats.setScanStartTime( startTime );
+ stats.setScanEndTime( endTime );
+ stats.setTotalArtifactFileSize( 1314527915L );
+ stats.setNewFileCount( 123 );
+ stats.setTotalArtifactCount( 10386 );
+ stats.setTotalProjectCount( 2031 );
+ stats.setTotalGroupCount( 529 );
+ stats.setTotalFileCount( 56229 );
+
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
+ Arrays.asList( FIRST_TEST_SCAN, SECOND_TEST_SCAN ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, SECOND_TEST_SCAN ),
+ stats );
+ metadataRepositoryControl.replay();
+
+ stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ assertNotNull( stats );
+ assertEquals( 1314527915L, stats.getTotalArtifactFileSize() );
+ assertEquals( 123, stats.getNewFileCount() );
+ assertEquals( 10386, stats.getTotalArtifactCount() );
+ assertEquals( 2031, stats.getTotalProjectCount() );
+ assertEquals( 529, stats.getTotalGroupCount() );
+ assertEquals( 56229, stats.getTotalFileCount() );
- createArtifact( "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT",
- "pom" ) ) );
++ assertEquals( SECOND_TEST_SCAN, formatTimestamp( stats.getScanStartTime() ) );
+ assertEquals( SECOND_TEST_SCAN, stats.getName() );
+ assertEquals( endTime, stats.getScanEndTime() );
+
+ metadataRepositoryControl.verify();
+ }
+
++ private static String formatTimestamp( Date value )
++ {
++ return new SimpleDateFormat( RepositoryStatistics.SCAN_TIMESTAMP_FORMAT ).format( value );
++ }
++
++ private static Date parseTimestamp( String value )
++ throws ParseException
++ {
++ return new SimpleDateFormat( RepositoryStatistics.SCAN_TIMESTAMP_FORMAT ).parse( value );
++ }
++
+ public void testGetLatestStatsWhenEmpty()
+ {
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
+ Collections.emptyList() );
+ metadataRepositoryControl.replay();
+
+ RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ assertNull( stats );
+
+ metadataRepositoryControl.verify();
+ }
+
+ public void testAddNewStats()
+ {
+ Date current = new Date();
+ Date startTime = new Date( current.getTime() - 12345 );
+
+ RepositoryStatistics stats = createTestStats( startTime, current );
+
+ walkRepository( 1 );
+
+ metadataRepository.addMetadataFacet( TEST_REPO_ID, stats );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
+ Arrays.asList( stats.getName() ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, stats.getName() ),
+ stats );
+
+ metadataRepositoryControl.replay();
+
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime, current, 56345, 45 );
+
+ stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
+ assertNotNull( stats );
+ assertEquals( 246900, stats.getTotalArtifactFileSize() );
+ assertEquals( 45, stats.getNewFileCount() );
+ assertEquals( 20, stats.getTotalArtifactCount() );
+ assertEquals( 5, stats.getTotalProjectCount() );
+ assertEquals( 4, stats.getTotalGroupCount() );
+ assertEquals( 56345, stats.getTotalFileCount() );
+ assertEquals( current.getTime() - 12345, stats.getScanStartTime().getTime() );
+ assertEquals( current, stats.getScanEndTime() );
+
+ metadataRepositoryControl.verify();
+ }
+
+ public void testDeleteStats()
+ {
+ walkRepository( 2 );
+
+ Date current = new Date();
+
+ Date startTime1 = new Date( current.getTime() - 12345 );
+ RepositoryStatistics stats1 = createTestStats( startTime1, new Date( current.getTime() - 6000 ) );
+ metadataRepository.addMetadataFacet( TEST_REPO_ID, stats1 );
+
+ Date startTime2 = new Date( current.getTime() - 3000 );
+ RepositoryStatistics stats2 = createTestStats( startTime2, current );
+ metadataRepository.addMetadataFacet( TEST_REPO_ID, stats2 );
+
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
+ Arrays.asList( stats1.getName(), stats2.getName() ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, stats2.getName() ),
+ stats2 );
+
+ metadataRepository.removeMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID );
+
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
+ Collections.emptyList() );
+
+ metadataRepositoryControl.replay();
+
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime1, stats1.getScanEndTime(), 56345,
+ 45 );
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime2, stats2.getScanEndTime(), 56345,
+ 45 );
+
+ assertNotNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
+
+ repositoryStatisticsManager.deleteStatistics( TEST_REPO_ID );
+
+ assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
+
+ metadataRepositoryControl.verify();
+ }
+
+ public void testDeleteStatsWhenEmpty()
+ {
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ),
+ Collections.emptyList(), 2 );
+ metadataRepository.removeMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID );
+
+ metadataRepositoryControl.replay();
+
+ assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
+
+ repositoryStatisticsManager.deleteStatistics( TEST_REPO_ID );
+
+ assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
+
+ metadataRepositoryControl.verify();
+ }
+
+ public void testGetStatsRangeInside()
+ {
+ walkRepository( 3 );
+
+ Date current = new Date();
+
+ addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
+ addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
+ addStats( new Date( current.getTime() - 1000 ), current );
+
+ ArrayList<String> keys = new ArrayList<String>( statsCreated.keySet() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ), keys );
+
+ // only match the middle one
+ String key = keys.get( 1 );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, key ),
+ statsCreated.get( key ) );
+
+ metadataRepositoryControl.replay();
+
+ for ( RepositoryStatistics stats : statsCreated.values() )
+ {
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
+ }
+
+ List<RepositoryStatistics> list =
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date( current.getTime() - 4000 ),
+ new Date( current.getTime() - 2000 ) );
+
+ assertEquals( 1, list.size() );
+ assertEquals( new Date( current.getTime() - 3000 ), list.get( 0 ).getScanStartTime() );
+
+ metadataRepositoryControl.verify();
+ }
+
+ public void testGetStatsRangeUpperOutside()
+ {
+ walkRepository( 3 );
+
+ Date current = new Date();
+
+ addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
+ addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
+ addStats( new Date( current.getTime() - 1000 ), current );
+
+ ArrayList<String> keys = new ArrayList<String>( statsCreated.keySet() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ), keys );
+
+ String key = keys.get( 1 );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, key ),
+ statsCreated.get( key ) );
+ key = keys.get( 2 );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, key ),
+ statsCreated.get( key ) );
+
+ metadataRepositoryControl.replay();
+
+ for ( RepositoryStatistics stats : statsCreated.values() )
+ {
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
+ }
+
+ List<RepositoryStatistics> list =
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date( current.getTime() - 4000 ),
+ current );
+
+ assertEquals( 2, list.size() );
+ assertEquals( new Date( current.getTime() - 3000 ), list.get( 1 ).getScanStartTime() );
+ assertEquals( new Date( current.getTime() - 1000 ), list.get( 0 ).getScanStartTime() );
+
+ metadataRepositoryControl.verify();
+ }
+
+ public void testGetStatsRangeLowerOutside()
+ {
+ walkRepository( 3 );
+
+ Date current = new Date();
+
+ addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
+ addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
+ addStats( new Date( current.getTime() - 1000 ), current );
+
+ ArrayList<String> keys = new ArrayList<String>( statsCreated.keySet() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ), keys );
+
+ String key = keys.get( 0 );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, key ),
+ statsCreated.get( key ) );
+ key = keys.get( 1 );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, key ),
+ statsCreated.get( key ) );
+
+ metadataRepositoryControl.replay();
+
+ for ( RepositoryStatistics stats : statsCreated.values() )
+ {
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
+ }
+
+ List<RepositoryStatistics> list =
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date( current.getTime() - 20000 ),
+ new Date( current.getTime() - 2000 ) );
+
+ assertEquals( 2, list.size() );
+ assertEquals( new Date( current.getTime() - 12345 ), list.get( 1 ).getScanStartTime() );
+ assertEquals( new Date( current.getTime() - 3000 ), list.get( 0 ).getScanStartTime() );
+
+ metadataRepositoryControl.verify();
+ }
+
+ public void testGetStatsRangeLowerAndUpperOutside()
+ {
+ walkRepository( 3 );
+
+ Date current = new Date();
+
+ addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
+ addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
+ addStats( new Date( current.getTime() - 1000 ), current );
+
+ ArrayList<String> keys = new ArrayList<String>( statsCreated.keySet() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ), keys );
+
+ String key = keys.get( 0 );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, key ),
+ statsCreated.get( key ) );
+ key = keys.get( 1 );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, key ),
+ statsCreated.get( key ) );
+ key = keys.get( 2 );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacet( TEST_REPO_ID, RepositoryStatistics.FACET_ID, key ),
+ statsCreated.get( key ) );
+
+ metadataRepositoryControl.replay();
+
+ for ( RepositoryStatistics stats : statsCreated.values() )
+ {
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
+ }
+
+ List<RepositoryStatistics> list =
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date( current.getTime() - 20000 ),
+ current );
+
+ assertEquals( 3, list.size() );
+ assertEquals( new Date( current.getTime() - 12345 ), list.get( 2 ).getScanStartTime() );
+ assertEquals( new Date( current.getTime() - 3000 ), list.get( 1 ).getScanStartTime() );
+ assertEquals( new Date( current.getTime() - 1000 ), list.get( 0 ).getScanStartTime() );
+
+ metadataRepositoryControl.verify();
+ }
+
+ public void testGetStatsRangeNotInside()
+ {
+ walkRepository( 3 );
+
+ Date current = new Date();
+
+ addStats( new Date( current.getTime() - 12345 ), new Date( current.getTime() - 6000 ) );
+ addStats( new Date( current.getTime() - 3000 ), new Date( current.getTime() - 2000 ) );
+ addStats( new Date( current.getTime() - 1000 ), current );
+
+ ArrayList<String> keys = new ArrayList<String>( statsCreated.keySet() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryStatistics.FACET_ID ), keys );
+
+ metadataRepositoryControl.replay();
+
+ for ( RepositoryStatistics stats : statsCreated.values() )
+ {
+ repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
+ stats.getScanEndTime(), 56345, 45 );
+ }
+
+ List<RepositoryStatistics> list =
+ repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date( current.getTime() - 20000 ),
+ new Date( current.getTime() - 16000 ) );
+
+ assertEquals( 0, list.size() );
+
+ metadataRepositoryControl.verify();
+ }
+
+ private void addStats( Date startTime, Date endTime )
+ {
+ RepositoryStatistics stats = createTestStats( startTime, endTime );
+ metadataRepository.addMetadataFacet( TEST_REPO_ID, stats );
+ statsCreated.put( stats.getName(), stats );
+ }
+
+ private ArtifactMetadata createArtifact( String namespace, String projectId, String projectVersion, String type )
+ {
+ ArtifactMetadata metadata = new ArtifactMetadata();
+ metadata.setRepositoryId( TEST_REPO_ID );
+ metadata.setId( projectId + "-" + projectVersion + "." + type );
+ metadata.setProject( projectId );
+ metadata.setSize( 12345L );
+ metadata.setVersion( projectVersion );
+ metadata.setNamespace( namespace );
+ return metadata;
+ }
+
+ private RepositoryStatistics createTestStats( Date startTime, Date endTime )
+ {
+ RepositoryStatistics stats = new RepositoryStatistics();
+ stats.setScanStartTime( startTime );
+ stats.setScanEndTime( endTime );
+ stats.setTotalArtifactFileSize( 20 * 12345L );
+ stats.setNewFileCount( 45 );
+ stats.setTotalArtifactCount( 20 );
+ stats.setTotalProjectCount( 5 );
+ stats.setTotalGroupCount( 4 );
+ stats.setTotalFileCount( 56345 );
+ stats.setTotalCountForType( "jar", 10 );
+ stats.setTotalCountForType( "pom", 10 );
+ return stats;
+ }
+
+ private void walkRepository( int count )
+ {
+ for ( int i = 0; i < count; i++ )
+ {
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getRootNamespaces( TEST_REPO_ID ),
+ Arrays.asList( "com", "org" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "com" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "com" ),
+ Arrays.asList( "example" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "com.example" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "com.example" ),
+ Arrays.asList( "example-project" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "com.example", "example-project" ),
+ Arrays.asList( "1.0", "1.1" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "com.example", "example-project", "1.0" ),
+ Arrays.asList( createArtifact( "com.example", "example-project", "1.0", "jar" ),
+ createArtifact( "com.example", "example-project", "1.0", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "com.example", "example-project", "1.1" ),
+ Arrays.asList( createArtifact( "com.example", "example-project", "1.1", "jar" ),
+ createArtifact( "com.example", "example-project", "1.1", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "org" ),
+ Arrays.asList( "apache", "codehaus" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "org.apache" ),
+ Arrays.asList( "archiva", "maven" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "org.apache" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getNamespaces( TEST_REPO_ID, "org.apache.archiva" ), Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjects( TEST_REPO_ID, "org.apache.archiva" ),
+ Arrays.asList( "metadata-repository-api", "metadata-model" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api" ),
+ Arrays.asList( "1.3-SNAPSHOT", "1.3" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api",
+ "1.3-SNAPSHOT" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3-SNAPSHOT", "jar" ),
+ createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3-SNAPSHOT",
+ "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.archiva", "metadata-repository-api", "1.3" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3", "jar" ),
+ createArtifact( "org.apache.archiva", "metadata-repository-api", "1.3", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "org.apache.archiva", "metadata-model" ),
+ Arrays.asList( "1.3-SNAPSHOT", "1.3" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT", "jar" ),
++ createArtifact( "org.apache.archiva", "metadata-model", "1.3-SNAPSHOT", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.archiva", "metadata-model", "1.3" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "metadata-model", "1.3", "jar" ),
+ createArtifact( "org.apache.archiva", "metadata-model", "1.3", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getNamespaces( TEST_REPO_ID, "org.apache.maven" ), Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjects( TEST_REPO_ID, "org.apache.maven" ), Arrays.asList( "maven-model" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "org.apache.maven", "maven-model" ),
+ Arrays.asList( "2.2.1" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.apache.maven", "maven-model", "2.2.1" ),
+ Arrays.asList( createArtifact( "org.apache.archiva", "maven-model", "2.2.1", "jar" ),
+ createArtifact( "org.apache.archiva", "maven-model", "2.2.1", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getNamespaces( TEST_REPO_ID, "org.codehaus" ),
+ Arrays.asList( "plexus" ) );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "org" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn( metadataRepository.getProjects( TEST_REPO_ID, "org.codehaus" ),
+ Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getNamespaces( TEST_REPO_ID, "org.codehaus.plexus" ), Arrays.asList() );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjects( TEST_REPO_ID, "org.codehaus.plexus" ),
+ Arrays.asList( "plexus-spring" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getProjectVersions( TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring" ),
+ Arrays.asList( "1.0", "1.1", "1.2" ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.0" ),
+ Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.0", "jar" ),
+ createArtifact( "org.codehaus.plexus", "plexus-spring", "1.0", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.1" ),
+ Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.1", "jar" ),
+ createArtifact( "org.codehaus.plexus", "plexus-spring", "1.1", "pom" ) ) );
+ metadataRepositoryControl.expectAndReturn(
+ metadataRepository.getArtifacts( TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.2" ),
+ Arrays.asList( createArtifact( "org.codehaus.plexus", "plexus-spring", "1.2", "jar" ),
+ createArtifact( "org.codehaus.plexus", "plexus-spring", "1.2", "pom" ) ) );
+ }
+ }
+}
<artifactId>activation</artifactId>
<version>1.1</version>
</dependency>
- <version>1.3-SNAPSHOT</version>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-model</artifactId>
- <version>1.3-SNAPSHOT</version>
++ <version>1.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-repository-api</artifactId>
- <version>1.3-SNAPSHOT</version>
++ <version>1.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>metadata-repository-file</artifactId>
- <version>1.3-SNAPSHOT</version>
++ <version>1.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>repository-statistics</artifactId>
- <version>1.3-SNAPSHOT</version>
++ <version>1.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>problem-reports</artifactId>
- <version>1.3-SNAPSHOT</version>
++ <version>1.4-SNAPSHOT</version>
++ </dependency>
++ <dependency>
++ <groupId>org.apache.archiva</groupId>
++ <artifactId>audit</artifactId>
++ <version>1.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>maven2-repository</artifactId>
++ <version>1.4-SNAPSHOT</version>
+ </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-applet</artifactId>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-core-consumers</artifactId>
- <version>1.3-SNAPSHOT</version>
+ <version>1.4-SNAPSHOT</version>
+ </dependency>
++ <dependency>
++ <groupId>org.apache.archiva</groupId>
++ <artifactId>archiva-metadata-consumer</artifactId>
++ <version>1.4-SNAPSHOT</version>
++ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-database</artifactId>
+ <version>1.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-database-consumers</artifactId>
+ <version>1.4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
- <artifactId>archiva-report-manager</artifactId>
+ <artifactId>archiva-repository-layer</artifactId>
- <version>1.3-SNAPSHOT</version>
+ <version>1.4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
- <artifactId>archiva-repository-layer</artifactId>
+ <artifactId>archiva-repository-scanner</artifactId>
- <version>1.3-SNAPSHOT</version>
++ <version>1.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-scheduler-api</artifactId>
- <version>1.3-SNAPSHOT</version>
+ <version>1.4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
- <artifactId>archiva-scheduled</artifactId>
+ <artifactId>archiva-scheduler-repository</artifactId>
- <version>1.3-SNAPSHOT</version>
++ <version>1.4-SNAPSHOT</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-scheduler-indexing</artifactId>
- <version>1.3-SNAPSHOT</version>
+ <version>1.4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
</dependencyManagement>
<properties>
<maven.version>2.0.8</maven.version>
+ <maven3x.version>3.0-alpha-4</maven3x.version>
<wagon.version>1.0-beta-5</wagon.version>
- <redback.version>1.2.2</redback.version>
+ <redback.version>1.2.3</redback.version>
<jetty.version>6.1.19</jetty.version>
<slf4j.version>1.5.8</slf4j.version>
<binder.version>0.9</binder.version>