--- /dev/null
+package org.apache.maven.archiva.configuration.functors;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.collections.Closure;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * RepositoryConfigurationToMapClosure - a Closure that collects each RepositoryConfiguration
+ * it is given into a Map keyed by the repository's id.
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id$
+ */
+public class RepositoryConfigurationToMapClosure
+ implements Closure
+{
+ private Map map = new HashMap();
+
+ public void execute( Object input )
+ {
+ if ( input instanceof RepositoryConfiguration )
+ {
+ RepositoryConfiguration repo = (RepositoryConfiguration) input;
+ map.put( repo.getId(), repo );
+ }
+ }
+
+ public Map getMap()
+ {
+ return map;
+ }
+}
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
+import org.codehaus.plexus.scheduler.CronExpressionValidator;
import org.codehaus.plexus.scheduler.Scheduler;
import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.TaskQueueException;
public static final String REPOSITORY_JOB_TRIGGER = "repository-job-trigger";
+ public static final String CRON_HOURLY = "0 0 * * * ?";
+
public void start()
throws StartingException
{
{
RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
- scheduleRepositoryJobs( repoConfig );
+ if ( repoConfig.isManaged() && repoConfig.isIndexed() )
+ {
+ scheduleRepositoryJobs( repoConfig );
+ }
}
scheduleDatabaseJobs();
// get the cron string for these database scanning jobs
String cronString = repoConfig.getRefreshCronExpression();
+ CronExpressionValidator cronValidator = new CronExpressionValidator();
+ if ( !cronValidator.validate( cronString ) )
+ {
+ getLogger().warn(
+ "Cron expression [" + cronString + "] for repository [" + repoConfig.getId()
+ + "] is invalid. Defaulting to hourly." );
+ cronString = CRON_HOURLY;
+ }
+
// setup the unprocessed artifact job
JobDetail repositoryJob = new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP,
RepositoryTaskJob.class );
return !queue.isEmpty();
}
-
+
public void queueRepositoryTask( RepositoryTask task )
throws TaskQueueException
{
repositoryScanningQueue.put( task );
}
-
+
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException
{
* under the License.
*/
+import org.apache.commons.collections.Closure;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.functors.IfClosure;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
-import org.apache.maven.archiva.database.ArchivaDAO;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.ObjectNotFoundException;
-import org.apache.maven.archiva.model.ArchivaRepository;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.configuration.functors.LocalRepositoryPredicate;
+import org.apache.maven.archiva.configuration.functors.RepositoryConfigurationToMapClosure;
+import org.apache.maven.archiva.model.RepositoryURL;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.codehaus.plexus.redback.authentication.AuthenticationException;
import org.codehaus.plexus.redback.authentication.AuthenticationResult;
import java.io.File;
import java.io.IOException;
+import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
+import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
*/
private AuditLog audit;
- /**
- * @plexus.requirement role-hint="jdo"
- */
- private ArchivaDAO dao;
-
/**
* @plexus.requirement
*/
private ArchivaConfiguration configuration;
+ private Map repositoryMap = new HashMap();
+
public void initComponents()
throws ServletException
{
httpAuth = (HttpAuthenticator) lookup( HttpAuthenticator.ROLE, "basic" );
audit = (AuditLog) lookup( AuditLog.ROLE );
- dao = (ArchivaDAO) lookup( ArchivaDAO.ROLE, "jdo" );
+ updateRepositoryMap();
+
configuration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
configuration.addChangeListener( this );
}
public void initServers( ServletConfig servletConfig )
throws DavServerException
{
- try
+ List repositories = configuration.getConfiguration().getRepositories();
+ Iterator itrepos = repositories.iterator();
+ while ( itrepos.hasNext() )
{
- List repositories = dao.getRepositoryDAO().getRepositories();
- Iterator itrepos = repositories.iterator();
- while ( itrepos.hasNext() )
+ RepositoryConfiguration repo = (RepositoryConfiguration) itrepos.next();
+ if ( !repo.isManaged() )
{
- ArchivaRepository repo = (ArchivaRepository) itrepos.next();
- if ( !repo.isManaged() )
- {
- // Skip non-managed.
- continue;
- }
+ // Skip non-managed.
+ continue;
+ }
- File repoDir = new File( repo.getUrl().getPath() );
+ RepositoryURL url = new RepositoryURL( repo.getUrl() );
+ File repoDir = new File( url.getPath() );
- if ( !repoDir.exists() )
+ if ( !repoDir.exists() )
+ {
+ if ( !repoDir.mkdirs() )
{
- if ( !repoDir.mkdirs() )
- {
- // Skip invalid directories.
- log( "Unable to create missing directory for " + repo.getUrl().getPath() );
- continue;
- }
+ // Skip invalid directories.
+ log( "Unable to create missing directory for " + url.getPath() );
+ continue;
}
+ }
- DavServerComponent server = createServer( repo.getId(), repoDir, servletConfig );
+ DavServerComponent server = createServer( repo.getId(), repoDir, servletConfig );
- server.addListener( audit );
- }
- }
- catch ( ArchivaDatabaseException e )
- {
- throw new DavServerException( "Unable to initialized dav servers: " + e.getMessage(), e );
+ server.addListener( audit );
}
}
- public ArchivaRepository getRepository( DavServerRequest request )
+ public RepositoryConfiguration getRepository( DavServerRequest request )
{
- String id = request.getPrefix();
- try
- {
- return dao.getRepositoryDAO().getRepository( id );
- }
- catch ( ObjectNotFoundException e )
- {
- log( "Unable to find repository for id [" + id + "]" );
- return null;
- }
- catch ( ArchivaDatabaseException e )
- {
- log( "Unable to find repository for id [" + id + "]: " + e.getMessage(), e );
- return null;
- }
+ return (RepositoryConfiguration) repositoryMap.get( request.getPrefix() );
}
public String getRepositoryName( DavServerRequest request )
{
- ArchivaRepository repoConfig = getRepository( request );
+ RepositoryConfiguration repoConfig = getRepository( request );
if ( repoConfig == null )
{
return "Unknown";
}
- return repoConfig.getModel().getName();
+ return repoConfig.getName();
+ }
+
+ private void updateRepositoryMap()
+ {
+ RepositoryConfigurationToMapClosure repoMapClosure = new RepositoryConfigurationToMapClosure();
+ Closure localRepoMap = IfClosure.getInstance( LocalRepositoryPredicate.getInstance(), repoMapClosure );
+ CollectionUtils.forAllDo( configuration.getConfiguration().getRepositories(), localRepoMap );
+
+ this.repositoryMap.clear();
+ this.repositoryMap.putAll( repoMapClosure.getMap() );
}
public boolean isAuthenticated( DavServerRequest davRequest, HttpServletResponse response )
{
if ( ConfigurationNames.isRepositories( propertyName ) )
{
+ updateRepositoryMap();
+
getDavManager().removeAllServers();
try