/**
* @author Olivier Lamy
*/
-@Service("fileLockManager#default")
+@Service( "fileLockManager#default" )
public class DefaultFileLockManager
implements FileLockManager
{
private int timeout = 0;
+
@Override
public Lock readFileLock( File file )
throws FileLockException, FileLockTimeoutException
}
StopWatch stopWatch = new StopWatch();
boolean acquired = false;
-
+ mkdirs( file.getParentFile() );
try
{
Lock lock = new Lock( file, false );
return new Lock( file );
}
+ mkdirs( file.getParentFile() );
+
StopWatch stopWatch = new StopWatch();
boolean acquired = false;
}
}
+ /**
+ * Recursively creates the given directory and any missing parent directories.
+ * The recursion walks the canonical path (see getCanonicalFile below), and the
+ * overall structure mirrors the java.io.File#mkdirs() contract — in particular
+ * it returns {@code false} when the directory already exists.
+ *
+ * @param directory the directory to create; may be {@code null}
+ * @return {@code true} only if this call actually created the directory;
+ *         {@code false} for a {@code null} argument, an already-existing
+ *         directory, or any creation/canonicalization failure
+ */
+ private boolean mkdirs( File directory )
+ {
+ if ( directory == null )
+ {
+ return false;
+ }
+
+ // Already present: report false (nothing was created), per the
+ // File.mkdirs() convention.
+ if ( directory.exists() )
+ {
+ return false;
+ }
+ // Fast path: the parent chain already exists, a single mkdir suffices.
+ if ( directory.mkdir() )
+ {
+ return true;
+ }
+
+ File canonDir = null;
+ try
+ {
+ canonDir = directory.getCanonicalFile();
+ }
+ catch ( IOException e )
+ {
+ // Could not resolve the canonical path (I/O error) — give up quietly.
+ return false;
+ }
+
+ // Create the parent chain first (or confirm another thread/process just
+ // created it), then attempt this directory itself.
+ File parentDir = canonDir.getParentFile();
+ return ( parentDir != null && ( mkdirs( parentDir ) || parentDir.exists() ) && canonDir.mkdir() );
+ }
+
+
public int getTimeout()
{
return timeout;
*/
public interface FileLockManager
{
+ /**
+ * Obtains an exclusive (write) lock on the given file.
+ * @param file the file to lock for writing
+ * @return the acquired write lock
+ * @throws FileLockException if the lock cannot be created
+ * @throws FileLockTimeoutException if the lock is not acquired before the configured timeout expires
+ */
Lock writeFileLock( File file )
throws FileLockException, FileLockTimeoutException;
+ /**
+ * Obtains a shared (read) lock on the given file.
+ * @param file the file to lock for reading
+ * @return the acquired read lock
+ * @throws FileLockException if the lock cannot be created
+ * @throws FileLockTimeoutException if the lock is not acquired before the configured timeout expires
+ */
Lock readFileLock( File file )
throws FileLockException, FileLockTimeoutException;
throws IOException
{
this.fileLockManager = fileLockManager;
- file.createNewFile();
+ //file.createNewFile();
}
/**
* Policy to apply before the download is attempted.
- *
- *
*/
-public interface PreDownloadPolicy extends DownloadPolicy
+public interface PreDownloadPolicy
+ extends DownloadPolicy
{
+ // no op
}
</exclusion>
</exclusions>
</dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-filelock</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-scanner</artifactId>
org.apache.maven.wagon.proxy,
org.apache.maven.wagon.repository,
com.google.common.io,
+ org.apache.archiva.common.filelock,
org.slf4j;resolution:=optional
</Import-Package>
</instructions>
import org.apache.archiva.admin.model.beans.ProxyConnectorRuleType;
import org.apache.archiva.admin.model.beans.RemoteRepository;
import org.apache.archiva.admin.model.networkproxy.NetworkProxyAdmin;
+import org.apache.archiva.common.filelock.FileLockException;
+import org.apache.archiva.common.filelock.FileLockManager;
+import org.apache.archiva.common.filelock.FileLockTimeoutException;
+import org.apache.archiva.common.filelock.Lock;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ConfigurationNames;
@Inject
private NetworkProxyAdmin networkProxyAdmin;
+ @Inject
+ @Named( value = "fileLockManager#default" )
+ private FileLockManager fileLockManager;
+
@PostConstruct
public void initialize()
{
catch ( RepositoryAdminException e )
{
log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ),
- "Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",targetRepository.getRepository().getId(), path,
- e.getMessage(), e );
+ "Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",
+ targetRepository.getRepository().getId(), path, e.getMessage(), e );
log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ), "Full stack trace", e );
}
}
}
catch ( ProxyException e )
{
- log.warn( "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
- targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
+ log.warn(
+ "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
+ targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
log.debug( "Full stack trace", e );
}
catch ( RepositoryAdminException e )
{
- log.warn( "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
- targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
+ log.warn(
+ "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
+ targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
log.debug( "Full stack trace", e );
}
}
catch ( ProxyException e )
{
urlFailureCache.cacheFailure( url );
- log.warn( "Transfer failed on checksum: {} : {}",url ,e.getMessage(), e );
+ log.warn( "Transfer failed on checksum: {} : {}", url, e.getMessage(), e );
// Critical issue, pass it on.
throw e;
}
log.warn(
"Transfer error from repository {} for artifact {} , continuing to next repository. Error message: {}",
- content.getRepository().getId(), Keys.toKey( artifact), exception.getMessage() );
+ content.getRepository().getId(), Keys.toKey( artifact ), exception.getMessage() );
log.debug( "Full stack trace", exception );
}
private void moveTempToTarget( File temp, File target )
throws ProxyException
{
- if ( target.exists() && !target.delete() )
- {
- throw new ProxyException( "Unable to overwrite existing target file: " + target.getAbsolutePath() );
- }
- target.getParentFile().mkdirs();
// TODO file lock library
- RandomAccessFile raf;
-
- if ( !temp.renameTo( target ) )
+ Lock lock = null;
+ try
{
- log.warn( "Unable to rename tmp file to its final name... resorting to copy command." );
-
- try
+ lock = fileLockManager.writeFileLock( target );
+ if ( lock.getFile().exists() && !lock.getFile().delete() )
{
- FileUtils.copyFile( temp, target );
+ throw new ProxyException( "Unable to overwrite existing target file: " + target.getAbsolutePath() );
}
- catch ( IOException e )
+
+ lock.getFile().getParentFile().mkdirs();
+
+ if ( !temp.renameTo( lock.getFile() ) )
{
- if ( target.exists() )
+ log.warn( "Unable to rename tmp file to its final name... resorting to copy command." );
+
+ try
{
- log.debug( "Tried to copy file {} to {} but file with this name already exists.", temp.getName(),
- target.getAbsolutePath() );
+ FileUtils.copyFile( temp, lock.getFile() );
}
- else
+ catch ( IOException e )
{
- throw new ProxyException(
- "Cannot copy tmp file " + temp.getAbsolutePath() + " to its final location", e );
+ if ( lock.getFile().exists() )
+ {
+ log.debug( "Tried to copy file {} to {} but file with this name already exists.",
+ temp.getName(), lock.getFile().getAbsolutePath() );
+ }
+ else
+ {
+ throw new ProxyException(
+ "Cannot copy tmp file " + temp.getAbsolutePath() + " to its final location", e );
+ }
+ }
+ finally
+ {
+ FileUtils.deleteQuietly( temp );
}
}
- finally
- {
- FileUtils.deleteQuietly( temp );
- }
+ } catch( FileLockException e)
+ {
+ throw new ProxyException( e.getMessage(), e );
+ } catch (FileLockTimeoutException e)
+ {
+ throw new ProxyException( e.getMessage(), e );
}
}
}
catch ( ConnectionException e )
{
- log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
+ log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
connected = false;
}
catch ( AuthenticationException e )
{
- log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
+ log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
connected = false;
}
<artifactId>archiva-indexer</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-filelock</artifactId>
+ </dependency>
+
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-admin-api</artifactId>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-indexer</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-filelock</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
<artifactId>jackrabbit-webdav</artifactId>
org.apache.archiva.redback.*,
org.apache.archiva.redback.components.taskqueue,
org.codehaus.plexus.util*,
+ org.apache.archiva.common.filelock,
org.codehaus.redback.integration.filter.authentication,
org.slf4j;resolution:=optional
</Import-Package>
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.audit.AuditListener;
+import org.apache.archiva.common.filelock.FileLockException;
+import org.apache.archiva.common.filelock.FileLockManager;
+import org.apache.archiva.common.filelock.FileLockTimeoutException;
+import org.apache.archiva.common.filelock.Lock;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler;
public static final String COMPLIANCE_CLASS = "1, 2";
- private ArchivaTaskScheduler scheduler;
+ private final ArchivaTaskScheduler scheduler;
+
+ private final FileLockManager fileLockManager;
private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepository repository,
DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
MimeTypes mimeTypes, List<AuditListener> auditListeners,
- RepositoryArchivaTaskScheduler scheduler )
+ RepositoryArchivaTaskScheduler scheduler, FileLockManager fileLockManager )
{
this.localResource = new File( localResource );
this.logicalResource = logicalResource;
this.mimeTypes = mimeTypes;
this.auditListeners = auditListeners;
this.scheduler = scheduler;
+ this.fileLockManager = fileLockManager;
}
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepository repository,
String remoteAddr, String principal, DavSession session,
ArchivaDavResourceLocator locator, DavResourceFactory factory, MimeTypes mimeTypes,
- List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler )
+ List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler , FileLockManager fileLockManager )
{
this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
- scheduler );
+ scheduler, fileLockManager );
this.remoteAddr = remoteAddr;
this.principal = principal;
outputContext.setContentType( mimeTypes.getMimeType( localResource.getName() ) );
}
- if ( !isCollection() && outputContext.hasStream() )
+ try
{
- FileInputStream is = null;
- try
+ if ( !isCollection() && outputContext.hasStream() )
{
- // TODO file lock library
- // Write content to stream
- is = new FileInputStream( localResource );
- IOUtils.copy( is, outputContext.getOutputStream() );
+ Lock lock = fileLockManager.readFileLock( localResource );
+ FileInputStream is = null;
+ try
+ {
+ // Write content to stream
+ is = new FileInputStream( lock.getFile() );
+ IOUtils.copy( is, outputContext.getOutputStream() );
+ }
+ finally
+ {
+ IOUtils.closeQuietly( is );
+ fileLockManager.release( lock );
+ }
}
- finally
+ else if ( outputContext.hasStream() )
{
- IOUtils.closeQuietly( is );
+ IndexWriter writer = new IndexWriter( this, localResource, logicalResource );
+ writer.write( outputContext );
}
}
- else if ( outputContext.hasStream() )
+ catch ( FileLockException e )
+ {
+ throw new IOException( e.getMessage(), e );
+ }
+ catch ( FileLockTimeoutException e )
{
- IndexWriter writer = new IndexWriter( this, localResource, logicalResource );
- writer.write( outputContext );
+ throw new IOException( e.getMessage(), e );
}
}
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.audit.AuditListener;
import org.apache.archiva.audit.Auditable;
+import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridge;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridgeException;
import org.apache.archiva.common.utils.PathUtil;
@Named( value = "archivaTaskScheduler#repository" )
private RepositoryArchivaTaskScheduler scheduler;
+ @Inject
+ @Named(value= "fileLockManager#default")
+ private FileLockManager fileLockManager;
+
private ApplicationContext applicationContext;
@Inject
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), locator.getResourcePath(), null,
request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
- auditListeners, scheduler );
+ auditListeners, scheduler, fileLockManager );
setHeaders( response, locator, resource );
return resource;
}
resource =
new ArchivaDavResource( metadataChecksum.getAbsolutePath(), logicalResource.getPath(), null,
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
- archivaLocator, this, mimeTypes, auditListeners, scheduler );
+ archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager );
}
}
else
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(), null,
request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
- auditListeners, scheduler );
+ auditListeners, scheduler, fileLockManager );
}
catch ( RepositoryMetadataException r )
{
File resourceFile = new File( temporaryIndexDirectory, requestedFileName );
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), requestedFileName, null,
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
- archivaLocator, this, mimeTypes, auditListeners, scheduler );
+ archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager );
}
else
resource =
new ArchivaDavResource( resourceFile.getAbsolutePath(), path, managedRepositoryContent.getRepository(),
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
- archivaLocator, this, mimeTypes, auditListeners, scheduler );
+ archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager );
if ( WebdavMethodUtil.isReadMethod( request.getMethod() ) )
{
managedRepositoryContent.getRepository(),
request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
- auditListeners, scheduler );
+ auditListeners, scheduler, fileLockManager );
}
catch ( LayoutException e )
{
File resourceFile = new File( managedRepositoryContent.getRepoRoot(), logicalResource );
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource,
managedRepositoryContent.getRepository(), davSession, archivaLocator,
- this, mimeTypes, auditListeners, scheduler );
+ this, mimeTypes, auditListeners, scheduler, fileLockManager );
resource.addLockManager( lockManager );
}
import junit.framework.TestCase;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.audit.AuditListener;
+import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.DavResource;
@Inject
private MimeTypes mimeTypes;
+ @Inject
+ private FileLockManager fileLockManager;
+
private ArchivaDavResourceLocator resourceLocator;
private DavResourceFactory resourceFactory;
private DavResource getDavResource( String logicalPath, File file )
{
return new ArchivaDavResource( file.getAbsolutePath(), logicalPath, repository, session, resourceLocator,
- resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null );
+ resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null, fileLockManager );
}
@Test
{
return new ArchivaDavResource( baseDir.getAbsolutePath(), "/", repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
- null );
+ null, fileLockManager );
}
}
}