-rw-r--r--  archiva-modules/archiva-base/archiva-converter/src/main/java/org/apache/archiva/converter/legacy/LegacyConverterArtifactConsumer.java |   9
-rw-r--r--  archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java |  61
-rw-r--r--  archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RelocatablePath.java |  41
-rw-r--r--  archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RequestPathMapper.java |  35
-rw-r--r--  archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java | 131
-rw-r--r--  archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java | 357
-rw-r--r--  archiva-modules/archiva-maven/archiva-maven-repository/pom.xml |   6
-rw-r--r--  archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/AbstractDefaultRepositoryContent.java |   1
-rw-r--r--  archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java | 149
-rw-r--r--  archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/MavenContentProvider.java |   7
-rw-r--r--  archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/ManagedDefaultRepositoryContentTest.java |   7
-rw-r--r--  archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/content/maven2/MavenRepositoryRequestInfoTest.java |   7
-rw-r--r--  archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java | 268
-rw-r--r--  archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java | 105
-rw-r--r--  archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/ArchivaDavResourceFactoryTest.java |   7
15 files changed, 975 insertions(+), 216 deletions(-)
diff --git a/archiva-modules/archiva-base/archiva-converter/src/main/java/org/apache/archiva/converter/legacy/LegacyConverterArtifactConsumer.java b/archiva-modules/archiva-base/archiva-converter/src/main/java/org/apache/archiva/converter/legacy/LegacyConverterArtifactConsumer.java
index 13a79dd64..bddec98e5 100644
--- a/archiva-modules/archiva-base/archiva-converter/src/main/java/org/apache/archiva/converter/legacy/LegacyConverterArtifactConsumer.java
+++ b/archiva-modules/archiva-base/archiva-converter/src/main/java/org/apache/archiva/converter/legacy/LegacyConverterArtifactConsumer.java
@@ -19,6 +19,7 @@ package org.apache.archiva.converter.legacy;
* under the License.
*/
+import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridge;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridgeException;
import org.apache.archiva.configuration.FileTypes;
@@ -71,6 +72,9 @@ public class LegacyConverterArtifactConsumer
@Inject
private FileTypes fileTypes;
+ @Inject
+ private FileLockManager fileLockManager;
+
private ArtifactFactory artifactFactory;
private ManagedRepositoryContent managedRepository;
@@ -93,11 +97,10 @@ public class LegacyConverterArtifactConsumer
}
@Override
- public void beginScan( org.apache.archiva.repository.ManagedRepository repository, Date whenGathered )
+ public void beginScan( ManagedRepository repository, Date whenGathered )
throws ConsumerException
{
- this.managedRepository = new ManagedDefaultRepositoryContent(artifactMappingProviders, fileTypes);
- this.managedRepository.setRepository( repository );
+ this.managedRepository = new ManagedDefaultRepositoryContent(repository, artifactMappingProviders, fileTypes, fileLockManager);
}
@Override
diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java
index 816b57766..37ba9cf94 100644
--- a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java
+++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java
@@ -23,9 +23,17 @@ import org.apache.archiva.model.ArchivaArtifact;
import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.ProjectReference;
import org.apache.archiva.model.VersionedReference;
+import org.apache.archiva.repository.content.StorageAsset;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.nio.file.Path;
+import java.time.Instant;
+import java.util.List;
import java.util.Set;
+import java.util.function.Consumer;
+import java.util.function.Function;
/**
* ManagedRepositoryContent interface for interacting with a managed repository in an abstract way,
@@ -224,4 +232,57 @@ public interface ManagedRepositoryContent extends RepositoryContent
* @return the relative path to the artifact.
*/
String toPath( ArchivaArtifact reference );
+
+ /**
+ * Returns information about a specific storage asset.
+ * @param path
+ * @return
+ */
+ StorageAsset getAsset(String path);
+
+ /**
+ * Consumes the data of the given asset, optionally acquiring a read lock on the file for the duration of the operation.
+ *
+ * @param asset
+ * @param consumerFunction
+ * @param readLock
+ * @throws IOException
+ */
+ void consumeData( StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException;
+
+ /**
+ * Adds a new asset to the underlying storage.
+ * @param path The path to the asset.
+ * @param container True if the asset should be a container, false if it is a file.
+ * @return
+ */
+ StorageAsset addAsset(String path, boolean container);
+
+ /**
+ * Removes the given asset from the storage.
+ *
+ * @param asset
+ * @throws IOException
+ */
+ void removeAsset(StorageAsset asset) throws IOException;
+
+ /**
+ * Moves the asset to the given location and returns the asset object for the destination.
+ *
+ * @param origin
+ * @param destination
+ * @return
+ */
+ StorageAsset moveAsset(StorageAsset origin, String destination) throws IOException;
+
+
+ /**
+ * Copies the given asset to the new destination.
+ *
+ * @param origin
+ * @param destination
+ * @return
+ * @throws IOException
+ */
+ StorageAsset copyAsset(StorageAsset origin, String destination) throws IOException;
}
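As a usage illustration only (not part of this commit), a caller could combine the new asset methods of ManagedRepositoryContent roughly as follows; the repository paths and the class name are made up for the sketch.

import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.content.StorageAsset;

import java.io.IOException;
import java.nio.file.Path;

public class AssetApiSketch
{
    public void readAndStore( ManagedRepositoryContent content, Path uploadedTempFile ) throws IOException
    {
        // Resolve an artifact by its repository-relative path (hypothetical path)
        StorageAsset jar = content.getAsset( "/org/example/demo/1.0/demo-1.0.jar" );
        if ( jar.exists() )
        {
            // readLock=true keeps a read lock on the file while the consumer runs
            content.consumeData( jar, is -> {
                try
                {
                    int count = 0;
                    while ( is.read() != -1 )
                    {
                        count++;
                    }
                    System.out.println( "Read " + count + " bytes from " + jar.getPath() );
                }
                catch ( IOException e )
                {
                    // Consumer cannot throw checked exceptions; consumeData wraps runtime exceptions into an IOException
                    throw new RuntimeException( e );
                }
            }, true );
        }

        // Create a new file asset (container=false) and replace its content from a temporary file
        StorageAsset pom = content.addAsset( "/org/example/demo/1.0/demo-1.0.pom", false );
        pom.create();
        pom.storeDataFile( uploadedTempFile );
    }
}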
diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RelocatablePath.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RelocatablePath.java
new file mode 100644
index 000000000..055abe48b
--- /dev/null
+++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RelocatablePath.java
@@ -0,0 +1,41 @@
+package org.apache.archiva.repository;
+
+/**
+ * @author Martin Stockhammer <martin_s@apache.org>
+ */
+public class RelocatablePath
+{
+
+ private final String path;
+ private final String originPath;
+ private final boolean relocated;
+
+ RelocatablePath(String path, String originPath) {
+ this.path = path;
+ this.originPath = originPath;
+ this.relocated = !path.equals(originPath);
+ }
+
+ RelocatablePath(String path) {
+ this.path = path;
+ this.originPath = path;
+ this.relocated = false;
+ }
+
+ public String getPath( )
+ {
+ return path;
+ }
+
+ public String getOriginPath( )
+ {
+ return originPath;
+ }
+
+ public boolean isRelocated( )
+ {
+ return relocated;
+ }
+
+
+}
diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RequestPathMapper.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RequestPathMapper.java
new file mode 100644
index 000000000..d06615fba
--- /dev/null
+++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RequestPathMapper.java
@@ -0,0 +1,35 @@
+package org.apache.archiva.repository;
+
+/**
+ *
+ * Maps request paths to native repository paths. Normally the path of an HTTP request and the
+ * path in the repository storage are identical.
+ *
+ * @author Martin Stockhammer <martin_s@apache.org>
+ */
+public interface RequestPathMapper
+{
+ /**
+ * Maps a request path to a repository path. The request path should be relative
+ * to the repository. The resulting path should always start with a '/'.
+ * The returned object contains additional information, e.g. whether the request path was relocated.
+ *
+ * @param requestPath
+ * @return
+ */
+ RelocatablePath relocatableRequestToRepository(String requestPath);
+
+
+ String requestToRepository(String requestPath);
+
+
+ /**
+ * Maps a repository path to a request path. The repository path is relative to the
+ * repository. The resulting path should always start with a '/'.
+ *
+ * @param repositoryPath
+ * @return
+ */
+ String repositoryToRequest(String repositoryPath);
+
+}
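To make the mapping contract concrete, here is a minimal sketch of an identity mapper (not part of this commit). It lives in the same package because the RelocatablePath constructors are package-private; the class name is an assumption.

package org.apache.archiva.repository;

public class IdentityRequestPathMapper implements RequestPathMapper
{
    @Override
    public RelocatablePath relocatableRequestToRepository( String requestPath )
    {
        // No relocation: origin and target path are identical
        return new RelocatablePath( normalize( requestPath ) );
    }

    @Override
    public String requestToRepository( String requestPath )
    {
        return normalize( requestPath );
    }

    @Override
    public String repositoryToRequest( String repositoryPath )
    {
        return normalize( repositoryPath );
    }

    private String normalize( String path )
    {
        // Both directions must return a path starting with '/'
        return path.startsWith( "/" ) ? path : "/" + path;
    }
}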
diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java
new file mode 100644
index 000000000..008f096a3
--- /dev/null
+++ b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java
@@ -0,0 +1,131 @@
+package org.apache.archiva.repository.content;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Path;
+import java.time.Instant;
+import java.util.List;
+import java.util.function.Consumer;
+
+/**
+ * An instance of this interface represents information about a specific asset in a repository.
+ * The asset may be a real artifact, a directory, or a virtual asset.
+ *
+ * Each asset has a unique path relative to the repository.
+ *
+ * The implementation may read the data directly from the filesystem or underlying storage implementation.
+ *
+ * @author Martin Stockhammer <martin_s@apache.org>
+ */
+public interface StorageAsset
+{
+ /**
+ * Returns the complete path relative to the repository to the given asset.
+ *
+ * @return A path starting with '/' that uniquely identifies the asset in the repository.
+ */
+ String getPath();
+
+ /**
+ * Returns the name of the asset. It may be just the filename.
+ * @return
+ */
+ String getName();
+
+ /**
+ * Returns the time of the last modification.
+ *
+ * @return
+ */
+ Instant getModificationTime();
+
+ /**
+ * Returns true, if this asset is a container type and contains further child assets.
+ * @return
+ */
+ boolean isContainer();
+
+ /**
+ * List the child assets.
+ *
+ * @return The list of children. If there are no children, an empty list will be returned.
+ */
+ List<StorageAsset> list();
+
+ /**
+ * The size in bytes of the asset. If the asset does not have a size, -1 should be returned.
+ *
+ * @return The size if the asset has a size, otherwise -1
+ */
+ long getSize();
+
+ /**
+ * Returns the input stream of the artifact content.
+ * It will throw an IOException if the stream cannot be created.
+ * Implementations should create a new stream instance for each invocation.
+ *
+ * @return The InputStream representing the content of the artifact.
+ * @throws IOException
+ */
+ InputStream getData() throws IOException;
+
+ /**
+ *
+ * Returns an output stream where you can write data to the asset.
+ *
+ * @param replace If true, the original data will be replaced, otherwise the data will be appended.
+ * @return The OutputStream where the data can be written.
+ * @throws IOException
+ */
+ OutputStream writeData( boolean replace) throws IOException;
+
+ /**
+ * Replaces the content. The implementation may do an atomic move operation, or keep a backup. If
+ * the operation fails, the implementation should try to restore the old data, if possible.
+ *
+ * The original file may be deleted, if the storage was successful.
+ *
+ * @param newData Replaces the data by the content of the given file.
+ */
+ boolean storeDataFile( Path newData) throws IOException;
+
+ /**
+ * Returns true, if the asset exists.
+ *
+ * @return True, if the asset exists, otherwise false.
+ */
+ boolean exists();
+
+ /**
+ * Creates the asset in the underlying storage, if it does not exist.
+ */
+ void create() throws IOException;
+
+ /**
+ * Returns the real path to the asset, if it exists. Not all implementations may support this method.
+ *
+ * @return The filesystem path to the asset.
+ * @throws UnsupportedOperationException
+ */
+ Path getFilePath() throws UnsupportedOperationException;
+}
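A short sketch (not part of this commit) of how the read-only part of the interface can be traversed; the class name is made up.

import org.apache.archiva.repository.content.StorageAsset;

public class AssetTreeWalker
{
    public void walk( StorageAsset asset )
    {
        if ( asset.isContainer() )
        {
            // list() returns an empty list when there are no children
            for ( StorageAsset child : asset.list() )
            {
                walk( child );
            }
        }
        else
        {
            System.out.println( asset.getPath() + " - " + asset.getSize() + " bytes, modified " + asset.getModificationTime() );
        }
    }
}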
diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java
new file mode 100644
index 000000000..ef3aad306
--- /dev/null
+++ b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java
@@ -0,0 +1,357 @@
+package org.apache.archiva.repository.content;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.OpenOption;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.nio.file.StandardOpenOption;
+import java.nio.file.attribute.AclEntry;
+import java.nio.file.attribute.AclEntryPermission;
+import java.nio.file.attribute.AclEntryType;
+import java.nio.file.attribute.AclFileAttributeView;
+import java.nio.file.attribute.PosixFileAttributeView;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * @author Martin Stockhammer <martin_s@apache.org>
+ */
+public class FilesystemAsset implements StorageAsset
+{
+
+ private final static Logger log = LoggerFactory.getLogger( FilesystemAsset.class );
+
+ private final Path basePath;
+ private final Path assetPath;
+ private final Path completeAssetPath;
+
+ public String DEFAULT_POSIX_FILE_PERMS = "rw-rw----";
+ public String DEFAULT_POSIX_DIR_PERMS = "rwxrwx---";
+
+ List<AclEntry> defaultFileAcls;
+ Set<PosixFilePermission> defaultPosixFilePermissions;
+ List<AclEntry> defaultDirectoryAcls;
+ Set<PosixFilePermission> defaultPosixDirectoryPermissions;
+
+ boolean supportsAcl = false;
+ boolean supportsPosix = false;
+
+ boolean directory = false;
+
+ public FilesystemAsset( Path basePath, String assetPath )
+ {
+ this.basePath = basePath;
+ this.assetPath = Paths.get( assetPath );
+ this.completeAssetPath = basePath.resolve( assetPath ).toAbsolutePath( );
+ init( );
+ }
+
+ public FilesystemAsset( Path basePath, String assetPath, boolean directory )
+ {
+ this.basePath = basePath;
+ this.assetPath = Paths.get( assetPath );
+ this.completeAssetPath = basePath.resolve( assetPath ).toAbsolutePath( );
+ this.directory = directory;
+ init( );
+ }
+
+ private void init( )
+ {
+ defaultFileAcls = new ArrayList<>( );
+ AclEntry.Builder aclBuilder = AclEntry.newBuilder( );
+ aclBuilder.setPermissions( AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL,
+ AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA );
+ aclBuilder.setType( AclEntryType.ALLOW );
+ defaultFileAcls.add( aclBuilder.build( ) );
+ defaultDirectoryAcls = new ArrayList<>( );
+ AclEntry.Builder aclDirBuilder = AclEntry.newBuilder( );
+ aclDirBuilder.setPermissions( AclEntryPermission.ADD_FILE, AclEntryPermission.ADD_SUBDIRECTORY, AclEntryPermission.DELETE_CHILD,
+ AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL,
+ AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA );
+ aclDirBuilder.setType( AclEntryType.ALLOW );
+ defaultDirectoryAcls.add( aclDirBuilder.build( ) );
+
+ defaultPosixFilePermissions = PosixFilePermissions.fromString( DEFAULT_POSIX_FILE_PERMS );
+ defaultPosixDirectoryPermissions = PosixFilePermissions.fromString( DEFAULT_POSIX_DIR_PERMS );
+
+ try
+ {
+ supportsAcl = Files.getFileStore( completeAssetPath ).supportsFileAttributeView( AclFileAttributeView.class );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not check filesystem capabilities {}", e.getMessage( ) );
+ }
+ try
+ {
+ supportsPosix = Files.getFileStore( completeAssetPath ).supportsFileAttributeView( PosixFileAttributeView.class );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not check filesystem capabilities {}", e.getMessage( ) );
+ }
+
+ }
+
+
+ @Override
+ public String getPath( )
+ {
+ return assetPath.toString( );
+ }
+
+ @Override
+ public String getName( )
+ {
+ return assetPath.getFileName( ).toString( );
+ }
+
+ @Override
+ public Instant getModificationTime( )
+ {
+ try
+ {
+ return Files.getLastModifiedTime( completeAssetPath ).toInstant( );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not read modification time of {}", completeAssetPath );
+ return Instant.now( );
+ }
+ }
+
+ @Override
+ public boolean isContainer( )
+ {
+ return Files.isDirectory( completeAssetPath );
+ }
+
+ @Override
+ public List<StorageAsset> list( )
+ {
+ try
+ {
+ return Files.list( completeAssetPath ).map( p -> new FilesystemAsset( basePath, basePath.relativize( p ).toString( ) ) )
+ .collect( Collectors.toList( ) );
+ }
+ catch ( IOException e )
+ {
+ return Collections.EMPTY_LIST;
+ }
+ }
+
+ @Override
+ public long getSize( )
+ {
+ try
+ {
+ return Files.size( completeAssetPath );
+ }
+ catch ( IOException e )
+ {
+ return -1;
+ }
+ }
+
+ @Override
+ public InputStream getData( ) throws IOException
+ {
+ return Files.newInputStream( completeAssetPath );
+ }
+
+ @Override
+ public OutputStream writeData( boolean replace ) throws IOException
+ {
+ OpenOption[] options;
+ if ( replace )
+ {
+ options = new OpenOption[]{StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE};
+ }
+ else
+ {
+ options = new OpenOption[]{StandardOpenOption.APPEND};
+ }
+ return Files.newOutputStream( completeAssetPath, options );
+ }
+
+ @Override
+ public boolean storeDataFile( Path newData ) throws IOException
+ {
+ final boolean createNew = !Files.exists( completeAssetPath );
+ Path backup = null;
+ if ( !createNew )
+ {
+ backup = findBackupFile( completeAssetPath );
+ }
+ try
+ {
+ if ( !createNew )
+ {
+ Files.move( completeAssetPath, backup );
+ }
+ Files.move( newData, completeAssetPath, StandardCopyOption.REPLACE_EXISTING );
+ setDefaultPermissions( completeAssetPath );
+ return true;
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not overwrite file {}", completeAssetPath );
+ // Revert if possible
+ if ( backup != null && Files.exists( backup ) )
+ {
+ Files.move( backup, completeAssetPath, StandardCopyOption.REPLACE_EXISTING );
+ }
+ throw e;
+ }
+ finally
+ {
+ if ( backup != null )
+ {
+ try
+ {
+ Files.deleteIfExists( backup );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not delete backup file {}", backup );
+ }
+ }
+ }
+
+ }
+
+ private void setDefaultPermissions(Path filePath) {
+ try
+ {
+ if ( supportsPosix )
+ {
+ Set<PosixFilePermission> perms;
+ if ( Files.isDirectory( filePath ) )
+ {
+ perms = defaultPosixDirectoryPermissions;
+ }
+ else
+ {
+ perms = defaultPosixFilePermissions;
+ }
+ Files.setPosixFilePermissions( filePath, perms );
+ }
+ else if ( supportsAcl )
+ {
+ List<AclEntry> perms;
+ if ( Files.isDirectory( filePath ) )
+ {
+ perms = defaultDirectoryAcls;
+ }
+ else
+ {
+ perms = defaultFileAcls;
+ }
+ AclFileAttributeView aclAttr = Files.getFileAttributeView( filePath, AclFileAttributeView.class );
+ aclAttr.setAcl( perms );
+ }
+ } catch (IOException e) {
+ log.error("Could not set permissions for {}: {}", filePath, e.getMessage());
+ }
+ }
+
+ private Path findBackupFile( Path file )
+ {
+ String ext = ".bak";
+ Path backupPath = file.getParent( ).resolve( file.getFileName( ).toString( ) + ext );
+ int idx = 0;
+ while ( Files.exists( backupPath ) )
+ {
+ backupPath = file.getParent( ).resolve( file.getFileName( ).toString( ) + ext + idx++ );
+ }
+ return backupPath;
+ }
+
+ @Override
+ public boolean exists( )
+ {
+ return Files.exists( completeAssetPath );
+ }
+
+ @Override
+ public Path getFilePath( ) throws UnsupportedOperationException
+ {
+ return completeAssetPath;
+ }
+
+
+ public void setDefaultFileAcls( List<AclEntry> acl )
+ {
+ defaultFileAcls = acl;
+ }
+
+ public List<AclEntry> getDefaultFileAcls( )
+ {
+ return defaultFileAcls;
+ }
+
+ public void setDefaultPosixFilePermissions( Set<PosixFilePermission> perms )
+ {
+ defaultPosixFilePermissions = perms;
+ }
+
+ public Set<PosixFilePermission> getDefaultPosixFilePermissions( )
+ {
+ return defaultPosixFilePermissions;
+ }
+
+ public void setDefaultDirectoryAcls( List<AclEntry> acl )
+ {
+ defaultDirectoryAcls = acl;
+ }
+
+ public List<AclEntry> getDefaultDirectoryAcls( )
+ {
+ return defaultDirectoryAcls;
+ }
+
+ public void setDefaultPosixDirectoryPermissions( Set<PosixFilePermission> perms )
+ {
+ defaultPosixDirectoryPermissions = perms;
+ }
+
+ public Set<PosixFilePermission> getDefaultPosixDirectoryPermissions( )
+ {
+ return defaultPosixDirectoryPermissions;
+ }
+
+ @Override
+ public void create( ) throws IOException
+ {
+ if ( !Files.exists( completeAssetPath ) )
+ {
+ if ( directory )
+ {
+ Files.createDirectories( completeAssetPath );
+ } else {
+ Files.createFile( completeAssetPath );
+ }
+ setDefaultPermissions( completeAssetPath );
+ }
+ }
+
+ @Override
+ public String toString( )
+ {
+ return assetPath.toString();
+ }
+
+
+}
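An illustrative sketch (not part of this commit) of creating and writing a file asset below a repository base directory; both paths and the class name are made up.

import org.apache.archiva.repository.content.FilesystemAsset;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FilesystemAssetExample
{
    public static void main( String[] args ) throws IOException
    {
        Path repoBase = Paths.get( "/var/archiva/repositories/internal" );

        // Relative asset path; the third argument marks it as a file, not a container.
        // The parent directory is assumed to exist already.
        FilesystemAsset asset = new FilesystemAsset( repoBase, "org/example/demo/maven-metadata.xml", false );
        asset.create();

        // replace=true truncates existing content instead of appending
        try ( OutputStream os = asset.writeData( true ) )
        {
            os.write( "<metadata/>".getBytes( StandardCharsets.UTF_8 ) );
        }
    }
}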
diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/pom.xml b/archiva-modules/archiva-maven/archiva-maven-repository/pom.xml
index 7eba9e946..e9cd8d052 100644
--- a/archiva-modules/archiva-maven/archiva-maven-repository/pom.xml
+++ b/archiva-modules/archiva-maven/archiva-maven-repository/pom.xml
@@ -136,6 +136,12 @@
<artifactId>modelmapper</artifactId>
</dependency>
<dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+
+
+ <dependency>
<groupId>org.apache.maven.wagon</groupId>
<artifactId>wagon-http</artifactId>
<scope>provided</scope>
diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/AbstractDefaultRepositoryContent.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/AbstractDefaultRepositoryContent.java
index f3cdb190f..54ae9da7c 100644
--- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/AbstractDefaultRepositoryContent.java
+++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/AbstractDefaultRepositoryContent.java
@@ -72,6 +72,7 @@ public abstract class AbstractDefaultRepositoryContent implements RepositoryCont
this.artifactMappingProviders = artifactMappingProviders;
}
+ @Override
public ArtifactReference toArtifactReference( String path )
throws LayoutException
{
diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java
index d5f55fc6e..e6da2c147 100644
--- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java
+++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java
@@ -19,6 +19,10 @@ package org.apache.archiva.repository.content.maven2;
* under the License.
*/
+import org.apache.archiva.common.filelock.FileLockException;
+import org.apache.archiva.common.filelock.FileLockManager;
+import org.apache.archiva.common.filelock.FileLockTimeoutException;
+import org.apache.archiva.common.filelock.Lock;
import org.apache.archiva.common.utils.PathUtil;
import org.apache.archiva.configuration.FileTypes;
import org.apache.archiva.metadata.repository.storage.maven2.ArtifactMappingProvider;
@@ -28,12 +32,19 @@ import org.apache.archiva.model.ArtifactReference;
import org.apache.archiva.model.ProjectReference;
import org.apache.archiva.model.VersionedReference;
import org.apache.archiva.repository.ContentNotFoundException;
+import org.apache.archiva.repository.EditableManagedRepository;
import org.apache.archiva.repository.LayoutException;
+import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.RepositoryException;
+import org.apache.archiva.repository.content.FilesystemAsset;
+import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
+import java.io.FileNotFoundException;
import java.io.IOException;
+import java.io.InputStream;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -43,6 +54,7 @@ import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
+import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -54,26 +66,38 @@ public class ManagedDefaultRepositoryContent
implements ManagedRepositoryContent
{
+ private final FileLockManager fileLockManager;
+
private FileTypes filetypes;
public void setFileTypes(FileTypes fileTypes) {
this.filetypes = fileTypes;
}
+ private ManagedRepository repository;
+ private Path repoDir;
- private org.apache.archiva.repository.ManagedRepository repository;
-
- public ManagedDefaultRepositoryContent(FileTypes fileTypes) {
+ public ManagedDefaultRepositoryContent(ManagedRepository repository, FileTypes fileTypes, FileLockManager lockManager) {
super(Collections.singletonList( new DefaultArtifactMappingProvider() ));
setFileTypes( fileTypes );
+ this.fileLockManager = lockManager;
+ setRepository( repository );
}
- public ManagedDefaultRepositoryContent( List<? extends ArtifactMappingProvider> artifactMappingProviders, FileTypes fileTypes )
+
+ public ManagedDefaultRepositoryContent( ManagedRepository repository, List<? extends ArtifactMappingProvider> artifactMappingProviders, FileTypes fileTypes, FileLockManager lockManager )
{
super(artifactMappingProviders==null ? Collections.singletonList( new DefaultArtifactMappingProvider() ) : artifactMappingProviders);
setFileTypes( fileTypes );
+ this.fileLockManager = lockManager;
+ setRepository( repository );
+ }
+
+ private Path getRepoDir() {
+ return repoDir;
}
+
@Override
public void deleteVersion( VersionedReference reference )
{
@@ -246,7 +270,6 @@ public class ManagedDefaultRepositoryContent
*
* @return the Set of available versions, based on the project reference.
* @throws LayoutException
- * @throws LayoutException
*/
@Override
public Set<String> getVersions( ProjectReference reference )
@@ -386,9 +409,14 @@ public class ManagedDefaultRepositoryContent
}
@Override
- public void setRepository( org.apache.archiva.repository.ManagedRepository repository )
+ public void setRepository( ManagedRepository repo )
{
- this.repository = repository;
+ this.repository = repo;
+ this.repoDir = PathUtil.getPathFromUri( repository.getLocation() );
+ if (repository instanceof EditableManagedRepository ) {
+ ((EditableManagedRepository)repository).setContent(this);
+ }
+
}
/**
@@ -502,4 +530,111 @@ public class ManagedDefaultRepositoryContent
{
this.filetypes = filetypes;
}
+
+
+ @Override
+ public void consumeData( StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
+ {
+ final Path path = asset.getFilePath();
+ try {
+ if (readLock) {
+ consumeDataLocked( path, consumerFunction );
+ } else
+ {
+ try ( InputStream is = Files.newInputStream( path ) )
+ {
+ consumerFunction.accept( is );
+ }
+ catch ( IOException e )
+ {
+ log.error("Could not read the input stream from file {}", path);
+ throw e;
+ }
+ }
+ } catch (RuntimeException e)
+ {
+ log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
+ throw new IOException( e );
+ }
+
+ }
+
+ public void consumeDataLocked( Path file, Consumer<InputStream> consumerFunction) throws IOException
+ {
+
+ final Lock lock;
+ try
+ {
+ lock = fileLockManager.readFileLock( file );
+ try ( InputStream is = Files.newInputStream( lock.getFile()))
+ {
+ consumerFunction.accept( is );
+ }
+ catch ( IOException e )
+ {
+ log.error("Could not read the input stream from file {}", file);
+ throw e;
+ } finally
+ {
+ fileLockManager.release( lock );
+ }
+ }
+ catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
+ {
+ log.error("Locking error on file {}", file);
+ throw new IOException(e);
+ }
+ }
+
+
+ @Override
+ public StorageAsset getAsset( String path )
+ {
+ final Path repoPath = getRepoDir();
+ return new FilesystemAsset( repoPath, path);
+ }
+
+ @Override
+ public StorageAsset addAsset( String path, boolean container )
+ {
+ final Path repoPath = getRepoDir();
+ FilesystemAsset asset = new FilesystemAsset( repoPath, path , container);
+ return asset;
+ }
+
+ @Override
+ public void removeAsset( StorageAsset asset ) throws IOException
+ {
+ Files.delete(asset.getFilePath());
+ }
+
+ @Override
+ public StorageAsset moveAsset( StorageAsset origin, String destination ) throws IOException
+ {
+ final Path repoPath = getRepoDir();
+ boolean container = origin.isContainer();
+ FilesystemAsset newAsset = new FilesystemAsset( repoPath, destination, container );
+ Files.move(origin.getFilePath(), newAsset.getFilePath());
+ return newAsset;
+ }
+
+ @Override
+ public StorageAsset copyAsset( StorageAsset origin, String destination ) throws IOException
+ {
+ final Path repoPath = getRepoDir();
+ boolean container = origin.isContainer();
+ FilesystemAsset newAsset = new FilesystemAsset( repoPath, destination, container );
+ if (Files.exists(newAsset.getFilePath())) {
+ throw new IOException("Destination file exists already "+ newAsset.getFilePath());
+ }
+ if (Files.isDirectory( origin.getFilePath() ))
+ {
+ FileUtils.copyDirectory(origin.getFilePath( ).toFile(), newAsset.getFilePath( ).toFile() );
+ } else if (Files.isRegularFile( origin.getFilePath() )) {
+ FileUtils.copyFile(origin.getFilePath( ).toFile(), newAsset.getFilePath( ).toFile() );
+ }
+ return newAsset;
+ }
+
+
}
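The constructor change is mirrored by all call sites below; as a compact sketch (not part of this commit), wiring the content now looks like this, with the dependencies assumed to be injected elsewhere and the class name made up.

import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.configuration.FileTypes;
import org.apache.archiva.metadata.repository.storage.maven2.ArtifactMappingProvider;
import org.apache.archiva.repository.ManagedRepository;
import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.content.maven2.ManagedDefaultRepositoryContent;

import java.util.List;

public class ContentWiringExample
{
    public ManagedRepositoryContent createContent( ManagedRepository repository,
                                                   List<? extends ArtifactMappingProvider> mappingProviders,
                                                   FileTypes fileTypes,
                                                   FileLockManager fileLockManager )
    {
        // The repository is now passed to the constructor; setRepository() is called
        // internally and registers the content on EditableManagedRepository instances.
        return new ManagedDefaultRepositoryContent( repository, mappingProviders, fileTypes, fileLockManager );
    }
}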
diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/MavenContentProvider.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/MavenContentProvider.java
index 27e0592d7..fd2e8a6c3 100644
--- a/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/MavenContentProvider.java
+++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/MavenContentProvider.java
@@ -19,6 +19,7 @@ package org.apache.archiva.repository.content.maven2;
* under the License.
*/
+import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.configuration.FileTypes;
import org.apache.archiva.metadata.repository.storage.maven2.ArtifactMappingProvider;
import org.apache.archiva.repository.ManagedRepository;
@@ -51,6 +52,9 @@ public class MavenContentProvider implements RepositoryContentProvider
private FileTypes filetypes;
@Inject
+ private FileLockManager fileLockManager;
+
+ @Inject
protected List<? extends ArtifactMappingProvider> artifactMappingProviders;
@@ -100,8 +104,7 @@ public class MavenContentProvider implements RepositoryContentProvider
if (!supportsLayout( repository.getLayout() )) {
throw new RepositoryException( "Repository layout "+repository.getLayout()+" is not supported by this implementation." );
}
- ManagedDefaultRepositoryContent content = new ManagedDefaultRepositoryContent(artifactMappingProviders, filetypes);
- content.setRepository( repository );
+ ManagedDefaultRepositoryContent content = new ManagedDefaultRepositoryContent(repository, artifactMappingProviders, filetypes ,fileLockManager);
return content;
}
diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/ManagedDefaultRepositoryContentTest.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/ManagedDefaultRepositoryContentTest.java
index 59c15a21d..6b448a1c9 100644
--- a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/ManagedDefaultRepositoryContentTest.java
+++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/ManagedDefaultRepositoryContentTest.java
@@ -19,6 +19,7 @@ package org.apache.archiva.metadata.repository.storage.maven2;
* under the License.
*/
+import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.utils.VersionComparator;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.FileType;
@@ -64,6 +65,9 @@ public class ManagedDefaultRepositoryContentTest
@Inject
List<? extends ArtifactMappingProvider> artifactMappingProviders;
+ @Inject
+ FileLockManager fileLockManager;
+
@Before
public void setUp()
throws Exception
@@ -78,9 +82,8 @@ public class ManagedDefaultRepositoryContentTest
fileTypes.afterConfigurationChange( null, "fileType", null );
- repoContent = new ManagedDefaultRepositoryContent(artifactMappingProviders, fileTypes);
+ repoContent = new ManagedDefaultRepositoryContent(repository, artifactMappingProviders, fileTypes, fileLockManager);
//repoContent = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "default" );
- repoContent.setRepository( repository );
}
@Test
diff --git a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/content/maven2/MavenRepositoryRequestInfoTest.java b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/content/maven2/MavenRepositoryRequestInfoTest.java
index 95bbf0f3a..783970784 100644
--- a/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/content/maven2/MavenRepositoryRequestInfoTest.java
+++ b/archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/content/maven2/MavenRepositoryRequestInfoTest.java
@@ -19,6 +19,7 @@ package org.apache.archiva.repository.content.maven2;
* under the License.
*/
+import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.utils.FileUtils;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.FileType;
@@ -67,6 +68,9 @@ public class MavenRepositoryRequestInfoTest
@Inject
List<? extends ArtifactMappingProvider> artifactMappingProviders;
+ @Inject
+ FileLockManager fileLockManager;
+
private MavenRepositoryRequestInfo repoRequest;
@@ -92,9 +96,8 @@ public class MavenRepositoryRequestInfoTest
fileTypes.afterConfigurationChange( null, "fileType", null );
- ManagedDefaultRepositoryContent repoContent = new ManagedDefaultRepositoryContent(artifactMappingProviders, fileTypes);
+ ManagedDefaultRepositoryContent repoContent = new ManagedDefaultRepositoryContent(repository, artifactMappingProviders, fileTypes, fileLockManager);
//repoContent = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "default" );
- repoContent.setRepository( repository );
repository.setContent(repoContent);
repoRequest = new MavenRepositoryRequestInfo(repository);
}
diff --git a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java
index 21c589a87..82697485d 100644
--- a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java
+++ b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java
@@ -19,12 +19,13 @@ package org.apache.archiva.webdav;
* under the License.
*/
-import org.apache.archiva.common.filelock.FileLockException;
-import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.common.filelock.FileLockTimeoutException;
-import org.apache.archiva.common.filelock.Lock;
+import java.util.Collections;
import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
+import org.apache.archiva.repository.LayoutException;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.ManagedRepositoryContent;
+import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.repository.events.AuditListener;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler;
@@ -57,8 +58,6 @@ import org.apache.jackrabbit.webdav.property.DavPropertySet;
import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
import org.apache.jackrabbit.webdav.property.ResourceType;
import org.joda.time.DateTime;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -68,9 +67,12 @@ import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
@@ -84,7 +86,7 @@ public class ArchivaDavResource
private final DavResourceFactory factory;
- private final Path localResource;
+ // private final Path localResource;
private final String logicalResource;
@@ -96,7 +98,7 @@ public class ArchivaDavResource
private String remoteAddr;
- private final org.apache.archiva.repository.ManagedRepository repository;
+ private final ManagedRepository repository;
private final MimeTypes mimeTypes;
@@ -108,16 +110,17 @@ public class ArchivaDavResource
private final ArchivaTaskScheduler<RepositoryTask> scheduler;
- private final FileLockManager fileLockManager;
-
private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );
- public ArchivaDavResource( String localResource, String logicalResource, org.apache.archiva.repository.ManagedRepository repository,
+ private StorageAsset asset;
+
+ public ArchivaDavResource( StorageAsset localResource, String logicalResource, ManagedRepository repository,
DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
MimeTypes mimeTypes, List<AuditListener> auditListeners,
- RepositoryArchivaTaskScheduler scheduler, FileLockManager fileLockManager )
+ RepositoryArchivaTaskScheduler scheduler) throws LayoutException
{
- this.localResource = Paths.get( localResource );
+ // this.localResource = Paths.get( localResource );
+ this.asset = localResource;
this.logicalResource = logicalResource;
this.locator = locator;
this.factory = factory;
@@ -130,22 +133,25 @@ public class ArchivaDavResource
this.mimeTypes = mimeTypes;
this.auditListeners = auditListeners;
this.scheduler = scheduler;
- this.fileLockManager = fileLockManager;
+
}
- public ArchivaDavResource( String localResource, String logicalResource, org.apache.archiva.repository.ManagedRepository repository,
+ public ArchivaDavResource( StorageAsset localResource, String logicalResource, ManagedRepository repository,
String remoteAddr, String principal, DavSession session,
ArchivaDavResourceLocator locator, DavResourceFactory factory, MimeTypes mimeTypes,
- List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler,
- FileLockManager fileLockManager )
+ List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler) throws LayoutException
{
this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
- scheduler, fileLockManager );
+ scheduler );
this.remoteAddr = remoteAddr;
this.principal = principal;
}
+ private ManagedRepositoryContent getContent() {
+ return repository.getContent();
+ }
+
@Override
public String getComplianceClass()
{
@@ -161,13 +167,13 @@ public class ArchivaDavResource
@Override
public boolean exists()
{
- return Files.exists(localResource);
+ return asset.exists();
}
@Override
public boolean isCollection()
{
- return Files.isDirectory(localResource);
+ return asset.isContainer();
}
@Override
@@ -183,11 +189,6 @@ public class ArchivaDavResource
return locator;
}
- public Path getLocalResource()
- {
- return localResource;
- }
-
@Override
public String getResourcePath()
{
@@ -203,15 +204,7 @@ public class ArchivaDavResource
@Override
public long getModificationTime()
{
- try
- {
- return Files.getLastModifiedTime(localResource).toMillis();
- }
- catch ( IOException e )
- {
- log.error("Could not get modification time of {}: {}", localResource, e.getMessage(), e);
- return 0;
- }
+ return asset.getModificationTime().toEpochMilli();
}
@Override
@@ -220,33 +213,29 @@ public class ArchivaDavResource
{
if ( !isCollection() )
{
- outputContext.setContentLength( Files.size( localResource ) );
- outputContext.setContentType( mimeTypes.getMimeType( localResource.getFileName().toString() ) );
+ outputContext.setContentLength( asset.getSize());
+ outputContext.setContentType( mimeTypes.getMimeType( asset.getName() ) );
}
- try
+ if ( !isCollection() && outputContext.hasStream() )
{
- if ( !isCollection() && outputContext.hasStream() )
- {
- Lock lock = fileLockManager.readFileLock( localResource );
- try (InputStream is = Files.newInputStream( lock.getFile()))
- {
- IOUtils.copy( is, outputContext.getOutputStream() );
- }
- }
- else if ( outputContext.hasStream() )
- {
- IndexWriter writer = new IndexWriter( this, localResource, logicalResource );
- writer.write( outputContext );
- }
+ getContent().consumeData( asset, is -> {copyStream(is, outputContext.getOutputStream());}, true );
}
- catch ( FileLockException e )
+ else if ( outputContext.hasStream() )
+ {
+ IndexWriter writer = new IndexWriter( getContent(), asset, logicalResource );
+ writer.write( outputContext );
+ }
+ }
+
+ private void copyStream(InputStream is, OutputStream os) throws RuntimeException {
+ try
{
- throw new IOException( e.getMessage(), e );
+ IOUtils.copy(is, os);
}
- catch ( FileLockTimeoutException e )
+ catch ( IOException e )
{
- throw new IOException( e.getMessage(), e );
+ throw new RuntimeException( "Copy failed "+e.getMessage(), e );
}
}
@@ -323,57 +312,75 @@ public class ArchivaDavResource
public void addMember( DavResource resource, InputContext inputContext )
throws DavException
{
- Path localFile = localResource.resolve( resource.getDisplayName() );
- boolean exists = Files.exists(localFile);
+ // Path localFile = localResource.resolve( resource.getDisplayName() );
+ boolean exists = asset.exists();
+ final String newPath = asset.getPath()+"/"+resource.getDisplayName();
if ( isCollection() && inputContext.hasStream() ) // New File
{
- try (OutputStream stream = Files.newOutputStream( localFile ))
- {
- IOUtils.copy( inputContext.getInputStream(), stream );
- }
- catch ( IOException e )
- {
- throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e );
- }
-
- // TODO: a bad deployment shouldn't delete an existing file - do we need to write to a temporary location first?
- long expectedContentLength = inputContext.getContentLength();
- long actualContentLength = 0;
+ Path tempFile = null;
try
{
- actualContentLength = Files.size(localFile);
+ tempFile = Files.createTempFile( "archiva_upload","dat" );
+ try(OutputStream os = Files.newOutputStream( tempFile, StandardOpenOption.CREATE ))
+ {
+ IOUtils.copy( inputContext.getInputStream( ), os );
+ }
+ long expectedContentLength = inputContext.getContentLength();
+ long actualContentLength = 0;
+ try
+ {
+ actualContentLength = Files.size(tempFile);
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not get length of file {}: {}", tempFile, e.getMessage(), e );
+ }
+ // length of -1 is given for a chunked request or unknown length, in which case we accept what was uploaded
+ if ( expectedContentLength >= 0 && expectedContentLength != actualContentLength )
+ {
+ String msg = "Content Header length was " + expectedContentLength + " but was " + actualContentLength;
+ log.debug( "Upload failed: {}", msg );
+ throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
+ }
+ StorageAsset member = getContent( ).addAsset( newPath, false );
+ member.create();
+ member.storeDataFile( tempFile );
}
catch ( IOException e )
{
- log.error( "Could not get length of file {}: {}", localFile, e.getMessage(), e );
- }
- // length of -1 is given for a chunked request or unknown length, in which case we accept what was uploaded
- if ( expectedContentLength >= 0 && expectedContentLength != actualContentLength )
- {
- String msg = "Content Header length was " + expectedContentLength + " but was " + actualContentLength;
- log.debug( "Upload failed: {}", msg );
-
- org.apache.archiva.common.utils.FileUtils.deleteQuietly( localFile );
- throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
+ throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e );
+ } finally {
+ if (tempFile!=null)
+ {
+ try
+ {
+ Files.deleteIfExists( tempFile );
+ }
+ catch ( IOException e )
+ {
+ log.error("Could not delete temporary file {}", tempFile);
+ }
+ }
}
- queueRepositoryTask( localFile );
+ // queueRepositoryTask( asset );
log.debug( "File '{}{}(current user '{}')", resource.getDisplayName(),
( exists ? "' modified " : "' created " ), this.principal );
- triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
+ // triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
}
else if ( !inputContext.hasStream() && isCollection() ) // New directory
{
try
{
- Files.createDirectories( localFile );
+ StorageAsset member = getContent( ).addAsset( newPath, true );
+ member.create();
}
catch ( IOException e )
{
- log.error("Could not create directory {}: {}", localFile, e.getMessage(), e);
+ log.error("Could not create directory {}: {}", newPath, e.getMessage(), e);
}
log.debug( "Directory '{}' (current user '{}')", resource.getDisplayName(), this.principal );
@@ -389,41 +396,33 @@ public class ArchivaDavResource
}
}
+ public StorageAsset getAsset() {
+ return asset;
+ }
+
@Override
public DavResourceIterator getMembers()
{
- List<DavResource> list = new ArrayList<>();
+ List<DavResource> list;
if ( exists() && isCollection() )
{
- try ( Stream<Path> stream = Files.list(localResource))
- {
- stream.forEach ( p ->
- {
- String item = p.toString();
+ list = asset.list().stream().filter( m -> !m.getName().startsWith( HIDDEN_PATH_PREFIX ) )
+ .map(m -> {
+ String path = locator.getResourcePath( ) + '/' + m.getName();
+ DavResourceLocator resourceLocator =
+ locator.getFactory( ).createResourceLocator( locator.getPrefix( ), path );
try
{
- if ( !item.startsWith( HIDDEN_PATH_PREFIX ) )
- {
- String path = locator.getResourcePath( ) + '/' + item;
- DavResourceLocator resourceLocator =
- locator.getFactory( ).createResourceLocator( locator.getPrefix( ), path );
- DavResource resource = factory.createResource( resourceLocator, session );
-
- if ( resource != null )
- {
- list.add( resource );
- }
- log.debug( "Resource '{}' retrieved by '{}'", item, this.principal );
- }
+ return factory.createResource( resourceLocator, session );
}
catch ( DavException e )
{
- // Should not occur
+ return null;
}
- });
- } catch (IOException e) {
- log.error("Error while listing {}", localResource);
- }
+
+ }).filter( Objects::nonNull ).collect( Collectors.toList());
+ } else {
+ list = Collections.emptyList( );
}
return new DavResourceIteratorImpl( list );
}
@@ -432,24 +431,24 @@ public class ArchivaDavResource
public void removeMember( DavResource member )
throws DavException
{
- Path resource = checkDavResourceIsArchivaDavResource( member ).getLocalResource();
+ StorageAsset resource = checkDavResourceIsArchivaDavResource( member ).getAsset( );
- if ( Files.exists(resource) )
+ if ( resource.exists() )
{
try
{
- if ( Files.isDirectory(resource) )
+ if ( resource.isContainer() )
{
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( resource );
+ getContent().removeAsset( resource );
triggerAuditEvent( member, AuditEvent.REMOVE_DIR );
}
else
{
- Files.deleteIfExists( resource );
+ getContent().removeAsset( resource );
triggerAuditEvent( member, AuditEvent.REMOVE_FILE );
}
- log.debug( "{}{}' removed (current user '{}')", ( Files.isDirectory(resource) ? "Directory '" : "File '" ),
+ log.debug( "{}{}' removed (current user '{}')", ( resource.isContainer() ? "Directory '" : "File '" ),
member.getDisplayName(), this.principal );
}
@@ -493,19 +492,17 @@ public class ArchivaDavResource
ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( destination );
if ( isCollection() )
{
- FileUtils.moveDirectory( getLocalResource().toFile(), resource.getLocalResource().toFile() );
-
+ this.asset = getContent().moveAsset( asset, destination.getResourcePath() );
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_DIRECTORY );
}
else
{
- FileUtils.moveFile( getLocalResource().toFile(), resource.getLocalResource().toFile() );
-
+ this.asset = getContent().moveAsset( asset, destination.getResourcePath() );
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_FILE );
}
log.debug( "{}{}' moved to '{}' (current user '{}')", ( isCollection() ? "Directory '" : "File '" ),
- getLocalResource().getFileName(), destination, this.principal );
+ asset.getPath(), destination, this.principal );
}
catch ( IOException e )
@@ -533,19 +530,19 @@ public class ArchivaDavResource
ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( destination );
if ( isCollection() )
{
- FileUtils.copyDirectory( getLocalResource().toFile(), resource.getLocalResource().toFile() );
+ getContent().copyAsset( asset, destination.getResourcePath() );
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_DIRECTORY );
}
else
{
- FileUtils.copyFile( getLocalResource().toFile(), resource.getLocalResource().toFile() );
+ getContent().copyAsset( asset, destination.getResourcePath() );
triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_FILE );
}
log.debug( "{}{}' copied to '{}' (current user '{}')", ( isCollection() ? "Directory '" : "File '" ),
- getLocalResource().getFileName(), destination, this.principal );
+ asset.getPath(), destination, this.principal );
}
catch ( IOException e )
@@ -694,32 +691,11 @@ public class ArchivaDavResource
}
// Need to get the ISO8601 date for properties
- DateTime dt = null;
- try
- {
- dt = new DateTime( Files.getLastModifiedTime( localResource ).toMillis() );
- }
- catch ( IOException e )
- {
- log.error("Could not get modification time of {}: {}", localResource, e.getMessage(), e);
- dt = new DateTime();
- }
- DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
- String modifiedDate = fmt.print( dt );
-
+ String modifiedDate = DateTimeFormatter.ISO_INSTANT.format( asset.getModificationTime() );
properties.add( new DefaultDavProperty<>( DavPropertyName.GETLASTMODIFIED, modifiedDate ) );
-
properties.add( new DefaultDavProperty<>( DavPropertyName.CREATIONDATE, modifiedDate ) );
- try
- {
- properties.add( new DefaultDavProperty<>( DavPropertyName.GETCONTENTLENGTH, Files.size(localResource) ) );
- }
- catch ( IOException e )
- {
- log.error("Could not get file size of {}: {}", localResource, e.getMessage(), e);
- properties.add( new DefaultDavProperty<>( DavPropertyName.GETCONTENTLENGTH, 0 ) );
- }
+ properties.add( new DefaultDavProperty<>( DavPropertyName.GETCONTENTLENGTH, asset.getSize() ) );
this.properties = properties;
@@ -748,6 +724,7 @@ public class ArchivaDavResource
}
}
+ /**
private void queueRepositoryTask( Path localFile )
{
RepositoryTask task = new RepositoryTask();
@@ -766,4 +743,5 @@ public class ArchivaDavResource
+ "'].", localFile.getFileName() );
}
}
+ **/
}
diff --git a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java
index 3da1664b4..284633156 100644
--- a/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java
+++ b/archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java
@@ -19,6 +19,8 @@ package org.apache.archiva.webdav.util;
* under the License.
*/
+import org.apache.archiva.repository.ManagedRepositoryContent;
+import org.apache.archiva.repository.content.StorageAsset;
import org.apache.commons.lang.StringUtils;
import org.apache.jackrabbit.webdav.DavResource;
import org.apache.jackrabbit.webdav.io.OutputContext;
@@ -27,18 +29,17 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.PrintWriter;
-import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.util.ArrayList;
-import java.util.Collections;
+import java.util.Comparator;
import java.util.Date;
-import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
-import java.util.stream.Collectors;
+import java.util.SortedMap;
+import java.util.TreeMap;
/**
*/
@@ -49,22 +50,45 @@ public class IndexWriter
private final String logicalResource;
- private final List<Path> localResources;
+ private final List<RepoAsset> repositoryAssets;
+
private final boolean isVirtual;
- public IndexWriter( DavResource resource, Path localResource, String logicalResource )
+ public class RepoAsset
{
- this.localResources = new ArrayList<>();
- this.localResources.add( localResource );
+ private ManagedRepositoryContent repo;
+ private StorageAsset asset;
+
+ public RepoAsset( ManagedRepositoryContent repo, StorageAsset asset) {
+ this.repo = repo;
+ this.asset = asset;
+ }
+
+ public ManagedRepositoryContent getRepo( )
+ {
+ return repo;
+ }
+
+ public StorageAsset getAsset( )
+ {
+ return asset;
+ }
+
+ }
+
+ public IndexWriter( ManagedRepositoryContent repo, StorageAsset reference, String logicalResource )
+ {
+ this.repositoryAssets = new ArrayList<>( );
+ this.repositoryAssets.add(new RepoAsset( repo, reference));
this.logicalResource = logicalResource;
this.isVirtual = false;
}
- public IndexWriter( DavResource resource, List<Path> localResources, String logicalResource )
+ public IndexWriter( List<RepoAsset> localResources, String logicalResource )
{
this.logicalResource = logicalResource;
- this.localResources = localResources;
+ this.repositoryAssets = localResources;
this.isVirtual = true;
}
@@ -152,61 +176,36 @@ public class IndexWriter
{
if ( !isVirtual )
{
- for ( Path localResource : localResources )
+ for ( RepoAsset localResource : repositoryAssets )
{
- List<Path> files = Files.list(localResource).collect( Collectors.toList( ) );
- Collections.sort( files );
-
- for ( Path file : files )
- {
- writeHyperlink( writer, file.getFileName().toString(), Files.getLastModifiedTime( file ).toMillis(), Files.size(file),
- Files.isDirectory( file ) );
- }
+ localResource.getAsset().list().stream().sorted(
+ Comparator.comparing( StorageAsset::getName )
+ ).forEach( asset -> {
+ writeHyperlink( writer, asset.getName(), asset.getModificationTime().toEpochMilli(), asset.getSize(),
+ asset.isContainer() );
+ } );
}
}
else
{
// virtual repository - filter unique directories
- Map<String, List<String>> uniqueChildFiles = new HashMap<>();
- List<String> sortedList = new ArrayList<>();
- for ( Path resource : localResources )
+ SortedMap<String, StorageAsset> uniqueChildFiles = new TreeMap<>();
+ for ( RepoAsset resource : repositoryAssets )
{
- List<Path> files = Files.list(resource).collect( Collectors.toList() );
- for ( Path file : files )
+ List<StorageAsset> files = resource.getAsset().list();
+ for ( StorageAsset file : files )
{
- List<String> mergedChildFiles = new ArrayList<>();
- if ( uniqueChildFiles.get( file.getFileName() ) == null )
- {
- mergedChildFiles.add( file.toAbsolutePath().toString() );
+ // the first entry wins
+ if (!uniqueChildFiles.containsKey( file.getName() )) {
+ uniqueChildFiles.put(file.getName(), file);
}
- else
- {
- mergedChildFiles = uniqueChildFiles.get( file.getFileName() );
- if ( !mergedChildFiles.contains( file.toAbsolutePath().toString() ) )
- {
- mergedChildFiles.add( file.toAbsolutePath().toString() );
- }
- }
- uniqueChildFiles.put( file.getFileName().toString(), mergedChildFiles );
- sortedList.add( file.getFileName().toString() );
}
}
-
- Collections.sort( sortedList );
- List<String> written = new ArrayList<>();
- for ( String fileName : sortedList )
+ for ( Map.Entry<String, StorageAsset> entry : uniqueChildFiles.entrySet())
{
- List<String> childFilesFromMap = uniqueChildFiles.get( fileName );
- for ( String childFilePath : childFilesFromMap )
- {
- Path childFile = Paths.get( childFilePath );
- if ( !written.contains( childFile.getFileName().toString() ) )
- {
- written.add( childFile.getFileName().toString() );
- writeHyperlink( writer, fileName, Files.getLastModifiedTime( childFile).toMillis(),
- Files.size(childFile), Files.isDirectory( childFile) );
- }
- }
+ final StorageAsset asset = entry.getValue();
+ writeHyperlink( writer, asset.getName(), asset.getModificationTime().toEpochMilli(),
+ asset.getSize(), asset.isContainer());
}
}
}
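For reference, a minimal sketch (not part of this commit) of the single-repository constructor as ArchivaDavResource now uses it; the repository path and the class name are assumptions.

import org.apache.archiva.repository.ManagedRepositoryContent;
import org.apache.archiva.repository.content.StorageAsset;
import org.apache.archiva.webdav.util.IndexWriter;
import org.apache.jackrabbit.webdav.io.OutputContext;

import java.io.IOException;

public class IndexWriterExample
{
    public void writeIndex( ManagedRepositoryContent content, OutputContext outputContext ) throws IOException
    {
        // Hypothetical directory asset inside the repository
        StorageAsset dir = content.getAsset( "/org/example/demo" );
        if ( dir.isContainer() )
        {
            IndexWriter writer = new IndexWriter( content, dir, "/org/example/demo" );
            writer.write( outputContext );
        }
    }
}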
diff --git a/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/ArchivaDavResourceFactoryTest.java b/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/ArchivaDavResourceFactoryTest.java
index 6932a31c4..e153a545f 100644
--- a/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/ArchivaDavResourceFactoryTest.java
+++ b/archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/ArchivaDavResourceFactoryTest.java
@@ -27,6 +27,7 @@ import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
import org.apache.archiva.admin.repository.DefaultRepositoryCommonValidator;
import org.apache.archiva.admin.repository.group.DefaultRepositoryGroupAdmin;
import org.apache.archiva.admin.repository.managed.DefaultManagedRepositoryAdmin;
+import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridge;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridgeException;
import org.apache.archiva.configuration.ArchivaConfiguration;
@@ -147,6 +148,9 @@ public class ArchivaDavResourceFactoryTest
List<? extends ArtifactMappingProvider> artifactMappingProviders;
@Inject
+ FileLockManager fileLockManager;
+
+ @Inject
FileTypes fileTypes;
public Path getProjectBase() {
@@ -249,9 +253,8 @@ public class ArchivaDavResourceFactoryTest
private ManagedRepositoryContent createManagedRepositoryContent( String repoId )
throws RepositoryAdminException
{
- ManagedRepositoryContent repoContent = new ManagedDefaultRepositoryContent(artifactMappingProviders, fileTypes);
org.apache.archiva.repository.ManagedRepository repo = repositoryRegistry.getManagedRepository( repoId );
- repoContent.setRepository( repo );
+ ManagedRepositoryContent repoContent = new ManagedDefaultRepositoryContent(repo, artifactMappingProviders, fileTypes, fileLockManager);
if (repo!=null && repo instanceof EditableManagedRepository)
{
( (EditableManagedRepository) repo ).setContent( repoContent );