]> source.dussan.org Git - archiva.git/commitdiff
Refactoring to StorageAsset access
authorMartin Stockhammer <martin_s@apache.org>
Sun, 28 Jul 2019 13:24:13 +0000 (15:24 +0200)
committerMartin Stockhammer <martin_s@apache.org>
Sun, 28 Jul 2019 13:24:13 +0000 (15:24 +0200)
122 files changed:
archiva-modules/archiva-base/archiva-configuration/pom.xml
archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/AbstractRepositoryPurge.java
archiva-modules/archiva-base/archiva-consumers/archiva-core-consumers/src/main/java/org/apache/archiva/consumers/core/repository/DaysOldRepositoryPurge.java
archiva-modules/archiva-base/archiva-policies/pom.xml
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/AbstractUpdatePolicy.java
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/CachedFailuresPolicy.java
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/ChecksumPolicy.java
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadErrorPolicy.java
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/DownloadPolicy.java
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsDownloadPolicy.java
archiva-modules/archiva-base/archiva-policies/src/main/java/org/apache/archiva/policies/PropagateErrorsOnUpdateDownloadPolicy.java
archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/CachedFailuresPolicyTest.java
archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ChecksumPolicyTest.java
archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/ReleasePolicyTest.java
archiva-modules/archiva-base/archiva-policies/src/test/java/org/apache/archiva/policies/SnapshotsPolicyTest.java
archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/ProxyFetchResult.java
archiva-modules/archiva-base/archiva-proxy-api/src/main/java/org/apache/archiva/proxy/model/RepositoryProxyHandler.java
archiva-modules/archiva-base/archiva-proxy/src/main/java/org/apache/archiva/proxy/DefaultRepositoryProxyHandler.java
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-api/src/main/java/org/apache/archiva/admin/model/group/RepositoryGroupAdmin.java
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/main/java/org/apache/archiva/admin/repository/group/DefaultRepositoryGroupAdmin.java
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/ArchivaIndexManagerMock.java
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MavenIndexContextMock.java
archiva-modules/archiva-base/archiva-repository-admin/archiva-repository-admin-default/src/test/java/org/apache/archiva/admin/mock/MockMergedRemoteIndexesScheduler.java
archiva-modules/archiva-base/archiva-repository-api/pom.xml
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexManager.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/ArchivaIndexingContext.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/IndexMergerRequest.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/MergedRemoteIndexesScheduler.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/indexer/merger/TemporaryGroupIndex.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepository.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/ManagedRepositoryContent.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/Repository.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/RepositoryGroup.java
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/RepositoryStorage.java [deleted file]
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java [deleted file]
archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/features/IndexCreationFeature.java
archiva-modules/archiva-base/archiva-repository-layer/pom.xml
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultIndexMerger.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/indexer/merger/DefaultMergedRemoteIndexesScheduler.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractManagedRepository.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRemoteRepository.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepository.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/AbstractRepositoryGroup.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicManagedRepository.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/BasicRemoteRepository.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/RepositoryRegistry.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/ArtifactUtil.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java [deleted file]
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemStorage.java [deleted file]
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/StorageUtil.java [deleted file]
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/MetadataTools.java
archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/metadata/RepositoryMetadataWriter.java
archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemAssetTest.java [deleted file]
archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemStorageTest.java [deleted file]
archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java
archiva-modules/archiva-base/archiva-repository-scanner/src/main/java/org/apache/archiva/repository/scanner/DefaultRepositoryScanner.java
archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/RepositoryScannerTest.java
archiva-modules/archiva-base/archiva-repository-scanner/src/test/java/org/apache/archiva/repository/scanner/mock/ManagedRepositoryContentMock.java
archiva-modules/archiva-base/archiva-storage-api/pom.xml [new file with mode: 0644]
archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/RepositoryStorage.java [new file with mode: 0644]
archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/StorageAsset.java [new file with mode: 0644]
archiva-modules/archiva-base/archiva-storage-fs/pom.xml [new file with mode: 0644]
archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemAsset.java [new file with mode: 0644]
archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemStorage.java [new file with mode: 0644]
archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/StorageUtil.java [new file with mode: 0644]
archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemAssetTest.java [new file with mode: 0644]
archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemStorageTest.java [new file with mode: 0644]
archiva-modules/archiva-base/pom.xml
archiva-modules/archiva-maven/archiva-maven-converter/src/main/java/org/apache/archiva/converter/legacy/DefaultLegacyRepositoryConverter.java
archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexContext.java
archiva-modules/archiva-maven/archiva-maven-indexer/src/main/java/org/apache/archiva/indexer/maven/MavenIndexManager.java
archiva-modules/archiva-maven/archiva-maven-indexer/src/test/java/org/apache/archiva/indexer/maven/MavenIndexManagerTest.java
archiva-modules/archiva-maven/archiva-maven-metadata/pom.xml
archiva-modules/archiva-maven/archiva-maven-metadata/src/main/java/org/apache/archiva/maven2/metadata/MavenMetadataReader.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/main/java/org/apache/archiva/proxy/maven/MavenRepositoryProxyHandler.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/CacheFailuresTransferTest.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ChecksumTransferTest.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ErrorHandlingTest.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/HttpProxyTransferTest.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/ManagedDefaultTransferTest.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/MetadataTransferTest.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/proxy/SnapshotTransferTest.java
archiva-modules/archiva-maven/archiva-maven-proxy/src/test/java/org/apache/archiva/repository/mock/ManagedRepositoryContentMock.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/dependency/tree/maven2/Maven3DependencyTreeBuilder.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/Maven2RepositoryPathTranslator.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/Maven2RepositoryStorage.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/metadata/repository/storage/maven2/RepositoryModelResolver.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/content/maven2/ManagedDefaultRepositoryContent.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenManagedRepository.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRemoteRepository.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryGroup.java
archiva-modules/archiva-maven/archiva-maven-repository/src/main/java/org/apache/archiva/repository/maven2/MavenRepositoryProvider.java
archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/metadata/repository/storage/maven2/MavenRepositoryMetadataReaderTest.java
archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/ArchivaIndexManagerMock.java
archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/index/mock/MavenIndexContextMock.java
archiva-modules/archiva-maven/archiva-maven-repository/src/test/java/org/apache/archiva/repository/metadata/RepositoryMetadataReaderTest.java
archiva-modules/archiva-maven/archiva-maven-scheduler/src/test/java/org/apache/archiva/scheduler/indexing/maven/ArchivaIndexingTaskExecutorTest.java
archiva-modules/archiva-scheduler/archiva-scheduler-repository-api/src/main/java/org/apache/archiva/scheduler/repository/model/RepositoryTask.java
archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/ArchivaIndexManagerMock.java
archiva-modules/archiva-scheduler/archiva-scheduler-repository/src/test/java/org/apache/archiva/mock/MavenIndexContextMock.java
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/pom.xml
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultBrowseService.java
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/DefaultRepositoriesService.java
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/main/java/org/apache/archiva/rest/services/utils/ArtifactBuilder.java
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/ArtifactContentEntriesTests.java
archiva-modules/archiva-web/archiva-rest/archiva-rest-services/src/test/java/org/apache/archiva/rest/services/utils/ArtifactBuilderTest.java
archiva-modules/archiva-web/archiva-security/src/test/java/org/apache/archiva/security/mock/MockBeanServices.java
archiva-modules/archiva-web/archiva-web-common/src/main/java/org/apache/archiva/web/api/DefaultFileUploadService.java
archiva-modules/archiva-web/archiva-webdav/pom.xml
archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResource.java
archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaDavResourceFactory.java
archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/ArchivaVirtualDavResource.java
archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/RepositoryServlet.java
archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/IndexWriter.java
archiva-modules/archiva-web/archiva-webdav/src/main/java/org/apache/archiva/webdav/util/TemporaryGroupIndexSessionCleaner.java
archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/DavResourceTest.java
archiva-modules/archiva-web/archiva-webdav/src/test/java/org/apache/archiva/webdav/OverridingRepositoryProxyHandler.java
archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryPathTranslator.java
archiva-modules/metadata/metadata-repository-api/src/main/java/org/apache/archiva/metadata/repository/storage/RepositoryStorage.java
archiva-modules/plugins/metadata-store-cassandra/src/test/java/org/apache/archiva/metadata/repository/cassandra/MockRepositoryStorage.java
archiva-modules/plugins/stage-repository-merge/src/main/java/org/apache/archiva/stagerepository/merge/Maven2RepositoryMerger.java
pom.xml

index fd4565cce75c064dfa44cfb3ee2f9031a683ec52..5961fd259b36ab116bbf7c039767c47112f8b9b8 100644 (file)
   </properties>
 
   <dependencies>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-policies</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva.redback.components.registry</groupId>
       <artifactId>spring-registry-api</artifactId>
index 44b7daa19102d19170df8a583f60cea7186ee4b0..6354e3a2a3bfc7dd414cb36990729a4f8923453a 100644 (file)
@@ -31,6 +31,8 @@ import org.apache.archiva.model.ArtifactReference;
 import org.apache.archiva.repository.ContentNotFoundException;
 import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.events.RepositoryListener;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -211,22 +213,22 @@ public abstract class AbstractRepositoryPurge
                         log.error( "Error during metadata retrieval {}: {}", metaBaseId, e.getMessage( ) );
                     }
                 }
-                Path artifactFile = repository.toFile( reference );
+                StorageAsset artifactFile = repository.toFile( reference );
 
                 for ( RepositoryListener listener : listeners )
                 {
                     listener.deleteArtifact( metadataRepository, repository.getId( ), reference.getGroupId( ),
                         reference.getArtifactId( ), reference.getVersion( ),
-                        artifactFile.getFileName( ).toString( ) );
+                            artifactFile.getName( ));
                 }
                 try
                 {
-                    Files.delete( artifactFile );
-                    log.debug( "File deleted: {}", artifactFile.toAbsolutePath( ) );
+                    artifactFile.getStorage().removeAsset(artifactFile);
+                    log.debug( "File deleted: {}", artifactFile );
                 }
                 catch ( IOException e )
                 {
-                    log.error( "Could not delete file {}: {}", artifactFile.toAbsolutePath( ), e.getMessage( ), e );
+                    log.error( "Could not delete file {}: {}", artifactFile.toString(), e.getMessage( ), e );
                     continue;
                 }
                 try
@@ -364,11 +366,11 @@ public abstract class AbstractRepositoryPurge
         }
     }
 
-    private void deleteSilently( Path path )
+    private void deleteSilently( StorageAsset path )
     {
         try
         {
-            Files.deleteIfExists( path );
+            path.getStorage().removeAsset(path);
             triggerAuditEvent( repository.getRepository( ).getId( ), path.toString( ), AuditEvent.PURGE_FILE );
         }
         catch ( IOException e )
@@ -387,22 +389,23 @@ public abstract class AbstractRepositoryPurge
      *
      * @param artifactFile the file to base off of.
      */
-    private void purgeSupportFiles( Path artifactFile )
+    private void purgeSupportFiles( StorageAsset artifactFile )
     {
-        Path parentDir = artifactFile.getParent( );
+        StorageAsset parentDir = artifactFile.getParent( );
 
-        if ( !Files.exists( parentDir ) )
+        if ( !parentDir.exists() )
         {
             return;
         }
 
-        final String artifactName = artifactFile.getFileName( ).toString( );
+        final String artifactName = artifactFile.getName( );
 
         try
         {
-            Files.find( parentDir, 3,
-                ( path, basicFileAttributes ) -> path.getFileName( ).toString( ).startsWith( artifactName )
-                    && Files.isRegularFile( path ) ).forEach( this::deleteSilently );
+
+            StorageUtil.recurse(parentDir, a -> {
+                if (!artifactFile.isContainer() && artifactFile.getName().startsWith(artifactName)) deleteSilently(a);
+            }, true, 3 );
         }
         catch ( IOException e )
         {
index 6c6607d2d282268f83cb0dcc39a8883f027bf6d8..9ff31ec60d139fca70f101686cb26b26b16d6603 100644 (file)
@@ -28,6 +28,7 @@ import org.apache.archiva.repository.ContentNotFoundException;
 import org.apache.archiva.repository.LayoutException;
 import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.events.RepositoryListener;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.time.DateUtils;
 
 import java.io.IOException;
@@ -115,12 +116,12 @@ public class DaysOldRepositoryPurge
                     artifactFile.toAbsolutePath( ).toString() );
                 newArtifactReference.setVersion( version );
 
-                Path newArtifactFile = repository.toFile( newArtifactReference );
+                StorageAsset newArtifactFile = repository.toFile( newArtifactReference );
 
                 // Is this a generic snapshot "1.0-SNAPSHOT" ?
                 if ( VersionUtil.isGenericSnapshot( newArtifactReference.getVersion( ) ) )
                 {
-                    if ( Files.getLastModifiedTime( newArtifactFile ).toMillis() < olderThanThisDate.getTimeInMillis( ) )
+                    if ( newArtifactFile.getModificationTime().toEpochMilli() < olderThanThisDate.getTimeInMillis( ) )
                     {
                         artifactsToDelete.addAll( repository.getRelatedArtifacts( newArtifactReference ) );
                     }
@@ -138,7 +139,7 @@ public class DaysOldRepositoryPurge
             }
             purge( artifactsToDelete );
         }
-        catch ( ContentNotFoundException | IOException e )
+        catch ( ContentNotFoundException e )
         {
             throw new RepositoryPurgeException( e.getMessage( ), e );
         }
index 63acd67509dd6ccd5cde6777056f422837b69437..ab5f64081164f02975fd8b3533a8c690f93b7be9 100644 (file)
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-common</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-storage-api</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-checksum</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-storage-fs</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-test-utils</artifactId>
index cf1c0da4a5aceb4d7ae7f37a86f62ab850898592..394917cddbcfa68a3ad3b76f58afcaa9f8f0b62d 100644 (file)
@@ -20,16 +20,13 @@ package org.apache.archiva.policies;
  */
 
 import org.apache.archiva.common.utils.VersionUtil;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Calendar;
-import java.util.Date;
 import java.util.List;
 import java.util.Properties;
 
index ac20dbc8edc7461f1921f00dbee5dd89a84cf3ca..7cd1294d076849c3d39fce865fa6ed20a57270ac 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.archiva.policies;
  */
 
 import org.apache.archiva.policies.urlcache.UrlFailureCache;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
index 8f6885a6bf20a3afd783f63ba47871148edb6639..99c5ddab4354066f056fd57c13473948031e2977 100644 (file)
@@ -22,7 +22,7 @@ package org.apache.archiva.policies;
 import org.apache.archiva.checksum.ChecksumAlgorithm;
 import org.apache.archiva.checksum.ChecksummedFile;
 import org.apache.archiva.checksum.UpdateStatus;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
index 28de5336b99d6900b7ee6214eb23b1d67bbefd68..43ae03e0f6d1e963634e8cd973de155f87043a81 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.archiva.policies;
  * under the License.
  */
 
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 import java.util.Map;
 import java.util.Properties;
index f9a14433127214caec134365e0978d565f3973f6..1ee2713db4ce5eb74a7fccaca63c3f9ffd50278f 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.archiva.policies;
  * under the License.
  */
 
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 import java.util.Properties;
 
index 7a0baefe1c41288fdf82ec2cd25a9e8195476316..7726ebb863119ecd108ff99564789338bb7a249f 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.archiva.policies;
  * under the License.
  */
 
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
index 9b22a9b23166aa2aae950206de1b05d6b11ec402..a50c1b9e0e84730183b5405964cf34b802409724 100644 (file)
@@ -19,11 +19,10 @@ package org.apache.archiva.policies;
  * under the License.
  */
 
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.springframework.stereotype.Service;
 
-import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
index 2c9995c0b9c1dba3117595d69893e90097ddcdf8..d27c392bedd4f5aef9baef9f0f6e92a172ba96d2 100644 (file)
@@ -20,7 +20,10 @@ package org.apache.archiva.policies;
  */
 
 import junit.framework.TestCase;
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.policies.urlcache.UrlFailureCache;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -28,6 +31,7 @@ import org.springframework.test.context.ContextConfiguration;
 
 import javax.inject.Inject;
 import javax.inject.Named;
+import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.Properties;
@@ -47,6 +51,8 @@ public class CachedFailuresPolicyTest
     @Inject
     private UrlFailureCache urlFailureCache;
 
+    private FilesystemStorage filesystemStorage;
+
     @Inject
     @Named( value = "preDownloadPolicy#cache-failures" )
     DownloadPolicy downloadPolicy;
@@ -57,9 +63,11 @@ public class CachedFailuresPolicyTest
         return downloadPolicy;
     }
 
-    private Path getFile()
-    {
-        return Paths.get( "target/cache-failures/" + getName() + ".txt" );
+    private StorageAsset getFile() throws IOException {
+        if (filesystemStorage==null) {
+            filesystemStorage = new FilesystemStorage(Paths.get("target/cache-failures"), new DefaultFileLockManager());
+        }
+        return filesystemStorage.getAsset( getName() + ".txt" );
     }
 
     private Properties createRequest()
@@ -74,7 +82,7 @@ public class CachedFailuresPolicyTest
         throws Exception
     {
         DownloadPolicy policy = lookupPolicy();
-        Path localFile = getFile();
+        StorageAsset localFile = getFile();
         Properties request = createRequest();
 
         request.setProperty( "url", "http://a.bad.hostname.maven.org/path/to/resource.txt" );
@@ -88,7 +96,7 @@ public class CachedFailuresPolicyTest
     {
 
         DownloadPolicy policy = lookupPolicy();
-        Path localFile = getFile();
+        StorageAsset localFile = getFile();
         Properties request = createRequest();
         // make unique name
         String url = "http://a.bad.hostname.maven.org/path/to/resource"+ System.currentTimeMillis() +".txt";
index 3faad0b9786fc6f7487ea9614d6b9a16ba267583..4a8c6cb8176dd69e4bdb4d1d39494a83b5c9a962 100644 (file)
@@ -19,6 +19,9 @@ package org.apache.archiva.policies;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
 import org.apache.commons.io.FileUtils;
 import org.junit.Rule;
@@ -31,6 +34,7 @@ import javax.inject.Inject;
 import javax.inject.Named;
 import java.io.BufferedReader;
 import java.io.FileReader;
+import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -51,6 +55,8 @@ public class ChecksumPolicyTest
 
     private static final String BAD = "bad";
 
+    private static FilesystemStorage filesystemStorage;
+
     @Inject
     @Named( value = "postDownloadPolicy#checksum" )
     PostDownloadPolicy downloadPolicy;
@@ -195,7 +201,7 @@ public class ChecksumPolicyTest
         throws Exception
     {
         PostDownloadPolicy policy = lookupPolicy();
-        Path localFile = createTestableFiles( null, null );
+        StorageAsset localFile = createTestableFiles( null, null );
         Properties request = createRequest();
 
         policy.applyPolicy( ChecksumPolicy.IGNORE, request, localFile );
@@ -205,7 +211,7 @@ public class ChecksumPolicyTest
         throws Exception
     {
         PostDownloadPolicy policy = lookupPolicy();
-        Path localFile = createTestableFiles( md5State, sha1State );
+        StorageAsset localFile = createTestableFiles( md5State, sha1State );
         Properties request = createRequest();
 
         boolean actualResult;
@@ -220,9 +226,9 @@ public class ChecksumPolicyTest
             actualResult = false;
             String msg = createMessage( ChecksumPolicy.FAIL, md5State, sha1State );
 
-            assertFalse( msg + " local file should not exist:", Files.exists(localFile) );
-            Path md5File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".sha1" );
-            Path sha1File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".md5" );
+            assertFalse( msg + " local file should not exist:", localFile.exists() );
+            Path md5File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".sha1" );
+            Path sha1File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".md5" );
             assertFalse( msg + " local md5 file should not exist:", Files.exists(md5File) );
             assertFalse( msg + " local sha1 file should not exist:", Files.exists(sha1File) );
         }
@@ -234,7 +240,7 @@ public class ChecksumPolicyTest
         throws Exception
     {
         PostDownloadPolicy policy = lookupPolicy();
-        Path localFile = createTestableFiles( md5State, sha1State );
+        StorageAsset localFile = createTestableFiles( md5State, sha1State );
         Properties request = createRequest();
 
         boolean actualResult;
@@ -252,8 +258,8 @@ public class ChecksumPolicyTest
         assertEquals( createMessage( ChecksumPolicy.FIX, md5State, sha1State ), expectedResult, actualResult );
 
         // End result should be legitimate SHA1 and MD5 files.
-        Path md5File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".md5" );
-        Path sha1File = localFile.toAbsolutePath().resolveSibling( localFile.getFileName() + ".sha1" );
+        Path md5File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".md5" );
+        Path sha1File = localFile.getFilePath().toAbsolutePath().resolveSibling( localFile.getName() + ".sha1" );
 
         assertTrue( "ChecksumPolicy.apply(FIX) md5 should exist.", Files.exists(md5File) && Files.isRegularFile(md5File) );
         assertTrue( "ChecksumPolicy.apply(FIX) sha1 should exist.", Files.exists(sha1File) && Files.isRegularFile(sha1File) );
@@ -336,37 +342,41 @@ public class ChecksumPolicyTest
         return request;
     }
 
-    private Path createTestableFiles( String md5State, String sha1State )
+    private StorageAsset createTestableFiles(String md5State, String sha1State )
         throws Exception
     {
-        Path sourceDir = getTestFile( "src/test/resources/checksums/" );
-        Path destDir = getTestFile( "target/checksum-tests/" + name.getMethodName() + "/" );
+        FilesystemStorage fs = new FilesystemStorage(Paths.get("target/checksum-tests"), new DefaultFileLockManager());
+        StorageAsset sourceDir = getTestFile( "src/test/resources/checksums/" );
+        StorageAsset destDir = getTestFile( "target/checksum-tests/" + name.getMethodName() + "/" );
 
-        FileUtils.copyFileToDirectory( sourceDir.resolve("artifact.jar" ).toFile(), destDir.toFile() );
+        FileUtils.copyFileToDirectory( sourceDir.getFilePath().resolve("artifact.jar" ).toFile(), destDir.getFilePath().toFile() );
 
         if ( md5State != null )
         {
-            Path md5File = sourceDir.resolve("artifact.jar.md5-" + md5State );
+            Path md5File = sourceDir.getFilePath().resolve("artifact.jar.md5-" + md5State );
             assertTrue( "Testable file exists: " + md5File.getFileName() + ":", Files.exists(md5File) && Files.isRegularFile(md5File) );
-            Path destFile = destDir.resolve("artifact.jar.md5" );
+            Path destFile = destDir.getFilePath().resolve("artifact.jar.md5" );
             FileUtils.copyFile( md5File.toFile(), destFile.toFile() );
         }
 
         if ( sha1State != null )
         {
-            Path sha1File = sourceDir.resolve("artifact.jar.sha1-" + sha1State );
+            Path sha1File = sourceDir.getFilePath().resolve("artifact.jar.sha1-" + sha1State );
             assertTrue( "Testable file exists: " + sha1File.getFileName() + ":", Files.exists(sha1File) && Files.isRegularFile(sha1File) );
-            Path destFile = destDir.resolve("artifact.jar.sha1" );
+            Path destFile = destDir.getFilePath().resolve("artifact.jar.sha1" );
             FileUtils.copyFile( sha1File.toFile(), destFile.toFile() );
         }
 
-        Path localFile = destDir.resolve("artifact.jar" );
-        return localFile;
+
+        StorageAsset localAsset = fs.getAsset("artifact.jar");
+        return localAsset;
     }
 
-    public static Path getTestFile( String path )
-    {
-        return Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), path );
+    public static StorageAsset getTestFile( String path ) throws IOException {
+        if (filesystemStorage==null) {
+            filesystemStorage = new FilesystemStorage(Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()), new DefaultFileLockManager());
+        }
+        return filesystemStorage.getAsset( path );
     }
 
 }
index adf341b299543b1fdfe691bde5bac339f7b4ec94..3c5bae066b8fc2830624c0acf3b7b7d6ecdf76de 100644 (file)
@@ -20,6 +20,7 @@ package org.apache.archiva.policies;
  */
 
 import junit.framework.TestCase;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
 import org.junit.Before;
 import org.junit.Test;
@@ -338,17 +339,17 @@ public class ReleasePolicyTest
             request.setProperty( "version", "2.0" );
         }
 
-        Path targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
-        Path localFile = targetDir.resolve( path );
+        StorageAsset targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
+        StorageAsset localFile = targetDir.resolve( path );
 
-        Files.deleteIfExists( localFile );
+        Files.deleteIfExists( localFile.getFilePath() );
 
         if ( createLocalFile )
         {
-            Files.createDirectories(  localFile.getParent());
-            org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile, FILE_ENCODING, "random-junk" );
-            Files.setLastModifiedTime( localFile,
-                FileTime.fromMillis(Files.getLastModifiedTime(localFile).toMillis() - generatedLocalFileUpdateDelta));
+            Files.createDirectories(  localFile.getParent().getFilePath());
+            org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile.getFilePath(), FILE_ENCODING, "random-junk" );
+            Files.setLastModifiedTime( localFile.getFilePath(),
+                FileTime.fromMillis(Files.getLastModifiedTime(localFile.getFilePath()).toMillis() - generatedLocalFileUpdateDelta));
         }
 
         policy.applyPolicy( setting, request, localFile );
index 5272777fe9021fb6a7982f52e9d081f6ea7660ac..a68ae8dd9aa66d06d33941f5d787a530b19238b2 100644 (file)
@@ -20,6 +20,8 @@ package org.apache.archiva.policies;
  */
 
 import junit.framework.TestCase;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
 import org.junit.Before;
 import org.junit.Test;
@@ -80,6 +82,8 @@ public class SnapshotsPolicyTest
     @Inject @Named(value="preDownloadPolicy#snapshots")
     PreDownloadPolicy policy;
 
+    private FilesystemStorage filesystemStorage;
+
     private PreDownloadPolicy lookupPolicy()
         throws Exception
     {
@@ -337,17 +341,17 @@ public class SnapshotsPolicyTest
             request.setProperty( "version", "2.0" );
         }
 
-        Path targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
-        Path localFile = targetDir.resolve( path );
+        StorageAsset targetDir = ChecksumPolicyTest.getTestFile( "target/test-policy/" );
+        StorageAsset localFile = targetDir.resolve( path );
 
-        Files.deleteIfExists( localFile );
+        Files.deleteIfExists( localFile.getFilePath() );
 
         if ( createLocalFile )
         {
-            Files.createDirectories( localFile.getParent());
-            org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile, FILE_ENCODING, "random-junk" );
-            Files.setLastModifiedTime( localFile,
-                FileTime.fromMillis( Files.getLastModifiedTime( localFile ).toMillis() - generatedLocalFileUpdateDelta ));
+            Files.createDirectories( localFile.getParent().getFilePath() );
+            org.apache.archiva.common.utils.FileUtils.writeStringToFile( localFile.getFilePath(), FILE_ENCODING, "random-junk" );
+            Files.setLastModifiedTime( localFile.getFilePath(),
+                FileTime.fromMillis( Files.getLastModifiedTime( localFile.getFilePath() ).toMillis() - generatedLocalFileUpdateDelta ));
         }
 
         policy.applyPolicy( setting, request, localFile );
index 700e4a56083079fcaaebc79051a5abdf1f15be6b..b4faf2da75404518987f2384072775625049fcf2 100644 (file)
@@ -20,9 +20,7 @@ package org.apache.archiva.proxy.model;
  */
 
 
-import org.apache.archiva.repository.content.StorageAsset;
-
-import java.nio.file.Path;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 /**
  * A result from a proxy fetch operation.
index f82d555140c8c8f2ddd1cf8e314aeaf1268fbe6f..efcd901f0253e78c3bc0c15214cdf22394c45da4 100644 (file)
@@ -23,7 +23,7 @@ import org.apache.archiva.model.ArtifactReference;
 import org.apache.archiva.policies.ProxyDownloadException;
 import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 import java.util.List;
 import java.util.Map;
index fac557ed43f1bb27a7502b55bd2931e8514abe88..d5f6f94a90216c9f6dcf583e9c4aa51c27d54528 100644 (file)
@@ -22,10 +22,7 @@ package org.apache.archiva.proxy;
 import org.apache.archiva.checksum.ChecksumAlgorithm;
 import org.apache.archiva.checksum.ChecksumUtil;
 import org.apache.archiva.proxy.model.ProxyConnectorRuleType;
-import org.apache.archiva.common.filelock.FileLockException;
 import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.common.filelock.FileLockTimeoutException;
-import org.apache.archiva.common.filelock.Lock;
 import org.apache.archiva.configuration.*;
 import org.apache.archiva.model.ArtifactReference;
 import org.apache.archiva.model.Keys;
@@ -39,9 +36,9 @@ import org.apache.archiva.redback.components.registry.Registry;
 import org.apache.archiva.redback.components.registry.RegistryListener;
 import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
 import org.apache.archiva.repository.*;
-import org.apache.archiva.repository.content.FilesystemStorage;
-import org.apache.archiva.repository.content.StorageAsset;
-import org.apache.archiva.repository.content.StorageUtil;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.archiva.repository.metadata.MetadataTools;
 import org.apache.archiva.repository.metadata.RepositoryMetadataException;
 import org.apache.archiva.scheduler.ArchivaTaskScheduler;
@@ -61,7 +58,6 @@ import javax.inject.Named;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
index 8cef4f30b6082c8fcc65f1bf4e3fccabc27f78cd..e98e8321a66d06088eb3f3d0dadb774473d546c9 100644 (file)
@@ -21,8 +21,8 @@ package org.apache.archiva.admin.model.group;
 import org.apache.archiva.admin.model.AuditInformation;
 import org.apache.archiva.admin.model.RepositoryAdminException;
 import org.apache.archiva.admin.model.beans.RepositoryGroup;
+import org.apache.archiva.repository.storage.StorageAsset;
 
-import java.nio.file.Path;
 import java.util.List;
 import java.util.Map;
 
@@ -75,5 +75,5 @@ public interface RepositoryGroupAdmin
     Map<String, List<String>> getRepositoryToGroupMap()
         throws RepositoryAdminException;
 
-    Path getMergedIndexDirectory(String repositoryGroupId );
+    StorageAsset getMergedIndexDirectory(String repositoryGroupId );
 }
index 757aa12a87aad4bd73b0dac02f5e8ee81b1f92f7..39919a88e86bd3d2e05d2694839dbf3e561d1ff3 100644 (file)
@@ -29,11 +29,11 @@ import org.apache.archiva.configuration.Configuration;
 import org.apache.archiva.configuration.RepositoryGroupConfiguration;
 import org.apache.archiva.metadata.model.facets.AuditEvent;
 import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler;
-import org.apache.archiva.repository.EditableRepository;
 import org.apache.archiva.repository.EditableRepositoryGroup;
 import org.apache.archiva.repository.RepositoryException;
 import org.apache.archiva.repository.RepositoryRegistry;
 import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -47,9 +47,6 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -112,9 +109,14 @@ public class DefaultRepositoryGroupAdmin
 
 
     @Override
-    public Path getMergedIndexDirectory( String repositoryGroupId )
+    public StorageAsset getMergedIndexDirectory(String repositoryGroupId )
     {
-        return groupsDirectory.resolve( repositoryGroupId );
+        org.apache.archiva.repository.RepositoryGroup group = repositoryRegistry.getRepositoryGroup(repositoryGroupId);
+        if (group!=null) {
+            return group.getFeature(IndexCreationFeature.class).get().getLocalIndexPath();
+        } else {
+            return null;
+        }
     }
 
     @Override
index 064045de9dc8dc80ddc15c3ea0efe65357f1cd07..1bd7e4c1039bcb5d430cbe4236e02da96e0ef932 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.archiva.admin.mock;
  * under the License.
  */
 
-import org.apache.archiva.admin.model.RepositoryAdminException;
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.utils.FileUtils;
 import org.apache.archiva.common.utils.PathUtil;
 import org.apache.archiva.configuration.ArchivaConfiguration;
@@ -40,10 +40,12 @@ import org.apache.archiva.repository.RemoteRepository;
 import org.apache.archiva.repository.Repository;
 import org.apache.archiva.repository.RepositoryType;
 import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
-import org.apache.archiva.repository.content.FilesystemAsset;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.index.ArtifactContext;
 import org.apache.maven.index.ArtifactContextProducer;
@@ -143,7 +145,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
 
     private Path getIndexPath( ArchivaIndexingContext ctx )
     {
-        return PathUtil.getPathFromUri( ctx.getPath( ) );
+        return ctx.getPath( ).getFilePath();
     }
 
     @FunctionalInterface
@@ -378,9 +380,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
     @Override
     public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
     {
-        final URI ctxUri = context.getPath();
+        final StorageAsset ctxUri = context.getPath();
         executeUpdateFunction(context, indexingContext -> {
-            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
             try {
                 indexer.addArtifactsToIndex(artifacts, indexingContext);
             } catch (IOException e) {
@@ -394,9 +396,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
     @Override
     public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
     {
-        final URI ctxUri = context.getPath();
+        final StorageAsset ctxUri = context.getPath();
         executeUpdateFunction(context, indexingContext -> {
-            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
             try {
                 indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
             } catch (IOException e) {
@@ -455,7 +457,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
                 log.warn("Index close failed");
             }
             try {
-                FileUtils.deleteDirectory(Paths.get(context.getPath()));
+                StorageUtil.deleteRecursively(context.getPath());
             } catch (IOException e) {
                 throw new IndexUpdateFailedException("Could not delete index files");
             }
@@ -530,6 +532,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
         URI indexDir = icf.getIndexPath();
         String indexPath = indexDir.getPath();
         Path indexDirectory = null;
+        FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getAsset("").getStorage();
         if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
         {
 
@@ -538,6 +541,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
             if ( indexDirectory.isAbsolute( ) )
             {
                 indexPath = indexDirectory.getFileName().toString();
+                filesystemStorage = new FilesystemStorage(indexDirectory.getParent(), new DefaultFileLockManager());
             }
             else
             {
@@ -554,7 +558,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
         {
             Files.createDirectories( indexDirectory );
         }
-        return new FilesystemAsset( indexPath, indexDirectory);
+        return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory);
     }
 
     private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
index 3bed602930333d79798c74436b6bde5c91140627..4b4528bc8e2de62b0e83fbf344175677113c09c6 100644 (file)
@@ -19,8 +19,12 @@ package org.apache.archiva.admin.mock;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.indexer.ArchivaIndexingContext;
 import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.maven.index.context.IndexingContext;
 
 import java.io.IOException;
@@ -38,10 +42,16 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
 
     private IndexingContext delegate;
     private Repository repository;
+    private FilesystemStorage filesystemStorage;
 
     MavenIndexContextMock(Repository repository, IndexingContext delegate) {
         this.delegate = delegate;
         this.repository = repository;
+        try {
+            this.filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath(), new DefaultFileLockManager());
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
 
     }
 
@@ -56,8 +66,9 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
     }
 
     @Override
-    public URI getPath() {
-        return delegate.getIndexDirectoryFile().toURI();
+    public StorageAsset getPath() {
+        return
+                new FilesystemAsset(filesystemStorage, "", delegate.getIndexDirectoryFile().toPath());
     }
 
     @Override
index 4cb8ca78d84304b743f9403d0a92a26c08f2e5a9..6222e7c51996c7282794e6d8012171697cba3bd9 100644 (file)
@@ -21,10 +21,9 @@ package org.apache.archiva.admin.mock;
 
 import org.apache.archiva.indexer.merger.MergedRemoteIndexesScheduler;
 import org.apache.archiva.repository.RepositoryGroup;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.springframework.stereotype.Service;
 
-import java.nio.file.Path;
-
 /**
  * @author Olivier Lamy
  */
@@ -34,7 +33,7 @@ public class MockMergedRemoteIndexesScheduler
 {
 
     @Override
-    public void schedule( RepositoryGroup repositoryGroup, Path directory )
+    public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory )
     {
         // no op
     }
index b5f10a7dbeb3c8ebd4514c609aa395ac05c02d25..7251ab2a6026bafb89c5960798ecae1c9a204f4c 100644 (file)
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-common</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-storage-api</artifactId>
+    </dependency>
 
     <dependency>
       <groupId>commons-lang</groupId>
index 5eb57705f3c1fd882b6d30ee186bfc272927d823..8e38b829a5d5656cb7b2be9c77283b393e7e8436 100644 (file)
@@ -21,10 +21,8 @@ package org.apache.archiva.indexer;
 
 import org.apache.archiva.repository.Repository;
 import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.content.StorageAsset;
 
 import java.net.URI;
-import java.nio.file.Path;
 import java.util.Collection;
 import java.util.List;
 
index 79535459831e4f7eabc99df84883662cc8eb6aca..2dee4412d41568f65a7a15794207cfa5cb7b6665 100644 (file)
@@ -20,6 +20,7 @@ package org.apache.archiva.indexer;
  */
 
 import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 import java.io.IOException;
 import java.net.URI;
@@ -48,7 +49,7 @@ public interface ArchivaIndexingContext {
      * The path where the index is stored.
      * @return
      */
-    URI getPath();
+    StorageAsset getPath();
 
     /**
      * Returns true, if the index has no entries or is not initialized.
index 787e02d5b70d5a4e942dbd218b632f55520720da..9a8bae159a9dc9ae44664be1e3f7c0c516e5cbc2 100644 (file)
@@ -18,6 +18,8 @@ package org.apache.archiva.indexer.merger;
  * under the License.
  */
 
+import org.apache.archiva.repository.storage.StorageAsset;
+
 import java.nio.file.Path;
 import java.util.Collection;
 
@@ -47,7 +49,7 @@ public class IndexMergerRequest
 
     private int mergedIndexTtl;
 
-    private Path mergedIndexDirectory;
+    private StorageAsset mergedIndexDirectory;
 
     private boolean temporary;
 
@@ -121,17 +123,17 @@ public class IndexMergerRequest
         this.mergedIndexTtl = mergedIndexTtl;
     }
 
-    public Path getMergedIndexDirectory()
+    public StorageAsset getMergedIndexDirectory()
     {
         return mergedIndexDirectory;
     }
 
-    public void setMergedIndexDirectory( Path mergedIndexDirectory )
+    public void setMergedIndexDirectory( StorageAsset mergedIndexDirectory )
     {
         this.mergedIndexDirectory = mergedIndexDirectory;
     }
 
-    public IndexMergerRequest mergedIndexDirectory( Path mergedIndexDirectory )
+    public IndexMergerRequest mergedIndexDirectory( StorageAsset mergedIndexDirectory )
     {
         this.mergedIndexDirectory = mergedIndexDirectory;
         return this;
index 8125610b6e0f6d59f0a2341aeab3aac07e437fe7..79eaaac3a8e33b0c9a022017c680686038f2a4b6 100644 (file)
@@ -21,8 +21,7 @@ package org.apache.archiva.indexer.merger;
 
 
 import org.apache.archiva.repository.RepositoryGroup;
-
-import java.nio.file.Path;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 /**
  * @author Olivier Lamy
@@ -35,8 +34,9 @@ public interface MergedRemoteIndexesScheduler
     * Checks whether this repository group needs to schedule a cron job to download/merge
     * remote indexes.
      * @param repositoryGroup
+     * @param directory
      */
-    void schedule(RepositoryGroup repositoryGroup, Path directory );
+    void schedule(RepositoryGroup repositoryGroup, StorageAsset directory );
 
     void unschedule( RepositoryGroup repositoryGroup );
 
index d0b576b74ef30dfa1395b05d9dfcc404612203c1..8f5588893439ac0a9e67953290fc95e3bc3b3af2 100644 (file)
@@ -18,6 +18,8 @@ package org.apache.archiva.indexer.merger;
  * under the License.
  */
 
+import org.apache.archiva.repository.storage.StorageAsset;
+
 import java.io.Serializable;
 import java.nio.file.Path;
 import java.util.Date;
@@ -30,7 +32,7 @@ public class TemporaryGroupIndex
 {
     private long creationTime = new Date().getTime();
 
-    private Path directory;
+    private StorageAsset directory;
 
     private String indexId;
 
@@ -38,7 +40,7 @@ public class TemporaryGroupIndex
 
     private int mergedIndexTtl;
 
-    public TemporaryGroupIndex(Path directory, String indexId, String groupId, int mergedIndexTtl)
+    public TemporaryGroupIndex(StorageAsset directory, String indexId, String groupId, int mergedIndexTtl)
     {
         this.directory = directory;
         this.indexId = indexId;
@@ -57,12 +59,12 @@ public class TemporaryGroupIndex
         return this;
     }
 
-    public Path getDirectory()
+    public StorageAsset getDirectory()
     {
         return directory;
     }
 
-    public TemporaryGroupIndex setDirectory( Path directory )
+    public TemporaryGroupIndex setDirectory( StorageAsset directory )
     {
         this.directory = directory;
         return this;
index c50a89cf223f7d1c18be7826bfa84c0d604d0e34..5cabc3f6a3ee82f1734df791476db5f9c47e1013 100644 (file)
@@ -23,7 +23,7 @@ import org.apache.archiva.model.ArchivaArtifact;
 import org.apache.archiva.model.ArtifactReference;
 import org.apache.archiva.model.ProjectReference;
 import org.apache.archiva.model.VersionedReference;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 import java.util.Set;
 
index 453badc2d86605d3811a1e64bd1ed86053116ac0..d6c38a04d9ebf5bdb8522ae767c5fa17bc7b142e 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.archiva.repository;
  */
 
 import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.repository.content.RepositoryStorage;
+import org.apache.archiva.repository.storage.RepositoryStorage;
 import org.apache.archiva.repository.features.RepositoryFeature;
 
 import java.net.URI;
index 8386c583857ecf29043b77d6a24bc6d8f6e318f6..5b50dc0a7b95ee2c9586f8d75632d225cfb6d39a 100644 (file)
@@ -19,8 +19,7 @@ package org.apache.archiva.repository;
  * under the License.
  */
 
-import org.apache.archiva.repository.content.RepositoryStorage;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.RepositoryStorage;
 
 import java.util.List;
 
diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/RepositoryStorage.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/RepositoryStorage.java
deleted file mode 100644 (file)
index 3dea0b2..0000000
+++ /dev/null
@@ -1,148 +0,0 @@
-package org.apache.archiva.repository.content;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-import java.nio.file.CopyOption;
-import java.util.function.Consumer;
-
-/**
- * Repository storage gives access to the files and directories on the storage.
- * The storage may be on a filesystem but can be any other storage system.
- *
- * This API is low level repository access. If you use this API you must
- * either have knowledge about the specific repository layout or use the structure
- * as it is, e.g. for browsing.
- *
- * It is the decision of the implementation, if this API provides access to all elements, or
- * just a selected view.
- *
- * Checking access is not part of this API.
- */
-public interface RepositoryStorage {
-    /**
-     * Returns information about a specific storage asset.
-     * @param path
-     * @return
-     */
-    StorageAsset getAsset(String path);
-
-    /**
-     * Consumes the data and sets a lock for the file during the operation.
-     *
-     * @param asset The asset from which the data is consumed.
-     * @param consumerFunction The consumer that reads the data
-     * @param readLock If true, a read lock is acquired on the asset.
-     * @throws IOException
-     */
-    void consumeData(StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock) throws IOException;
-
-    /**
-     * Consumes the data and sets a lock for the file during the operation.
-     *
-     * @param asset The asset from which the data is consumed.
-     * @param consumerFunction The consumer that reads the data
-     * @param readLock If true, a read lock is acquired on the asset.
-     * @throws IOException
-     */
-    void consumeDataFromChannel( StorageAsset asset, Consumer<ReadableByteChannel> consumerFunction, boolean readLock) throws IOException;
-
-    /**
-     * Writes data to the asset using a write lock.
-     *
-     * @param asset The asset to which the data is written.
-     * @param consumerFunction The function that provides the data.
-     * @param writeLock If true, a write lock is acquired on the destination.
-     */
-    void writeData( StorageAsset asset, Consumer<OutputStream> consumerFunction, boolean writeLock) throws IOException;;
-
-    /**
-     * Writes data and sets a lock during the operation.
-     *
-     * @param asset The asset to which the data is written.
-     * @param consumerFunction The function that provides the data.
-     * @param writeLock If true, a write lock is acquired on the destination.
-     * @throws IOException
-     */
-    void writeDataToChannel( StorageAsset asset, Consumer<WritableByteChannel> consumerFunction, boolean writeLock) throws IOException;
-
-    /**
-     * Adds a new asset to the underlying storage.
-     * @param path The path to the asset.
-     * @param container True, if the asset should be a container, false, if it is a file.
-     * @return
-     */
-    StorageAsset addAsset(String path, boolean container);
-
-    /**
-     * Removes the given asset from the storage.
-     *
-     * @param asset
-     * @throws IOException
-     */
-    void removeAsset(StorageAsset asset) throws IOException;
-
-    /**
-     * Moves the asset to the given location and returns the asset object for the destination.
-     *
-     * @param origin The original asset
-     * @param destination The destination path pointing to the new asset.
-     * @param copyOptions The copy options.
-     * @return The asset representation of the moved object.
-     */
-    StorageAsset moveAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException;
-
-    /**
-     * Moves the asset to the new path.
-     *
-     * @param origin The original asset
-     * @param destination The destination asset.
-     * @param copyOptions The copy options (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING}
-     * @throws IOException If it was not possible to copy the asset.
-     */
-    void moveAsset(StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException;
-
-    /**
-     * Copies the given asset to the new destination.
-     *
-     * @param origin The original asset
-     * @param destination The path to the new asset
-     * @param copyOptions The copy options, e.g. (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING}
-     * @return The asset representation of the copied object
-     * @throws IOException If it was not possible to copy the asset
-     */
-    StorageAsset copyAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException;
-
-    /**
-     * Copies the given asset to the new destination.
-     *
-     * @param origin The original asset
-     * @param destination The path to the new asset
-     * @param copyOptions The copy options, e.g. (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING}
-     * @throws IOException If it was not possible to copy the asset
-     */
-    void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException;
-
-
-}
diff --git a/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java b/archiva-modules/archiva-base/archiva-repository-api/src/main/java/org/apache/archiva/repository/content/StorageAsset.java
deleted file mode 100644 (file)
index 38ef2a8..0000000
+++ /dev/null
@@ -1,180 +0,0 @@
-package org.apache.archiva.repository.content;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-import java.nio.file.Path;
-import java.time.Instant;
-import java.util.List;
-import java.util.function.Consumer;
-
-/**
- * A instance of this interface represents information about an specific asset in a repository.
- * The asset may be an real artifact, a directory, or a virtual asset.
- *
- * Each asset has a unique path relative to the repository.
- *
- * The implementation may read the data directly from the filesystem or underlying storage implementation.
- *
- * @author Martin Stockhammer <martin_s@apache.org>
- */
-public interface StorageAsset
-{
-
-    /**
-     * Returns the storage this asset belongs to.
-     * @return
-     */
-    RepositoryStorage getStorage();
-
-    /**
-     * Returns the complete path relative to the repository to the given asset.
-     *
-     * @return A path starting with '/' that uniquely identifies the asset in the repository.
-     */
-    String getPath();
-
-    /**
-     * Returns the name of the asset. It may be just the filename.
-     * @return
-     */
-    String getName();
-
-    /**
-     * Returns the time of the last modification.
-     *
-     * @return
-     */
-    Instant getModificationTime();
-
-    /**
-     * Returns true, if this asset is a container type and contains further child assets.
-     * @return
-     */
-    boolean isContainer();
-
-    /**
-     * List the child assets.
-     *
-     * @return The list of children. If there are no children and if the asset is not a container, a empty list will be returned.
-     */
-    List<StorageAsset> list();
-
-    /**
-     * The size in bytes of the asset. If the asset does not have a size, -1 should be returned.
-     *
-     * @return The size if the asset has a size, otherwise -1
-     */
-    long getSize();
-
-    /**
-     * Returns the input stream of the artifact content.
-     * It will throw a IOException, if the stream could not be created.
-     * Implementations should create a new stream instance for each invocation and make sure that the
-     * stream is proper closed after usage.
-     *
-     * @return The InputStream representing the content of the artifact.
-     * @throws IOException
-     */
-    InputStream getReadStream() throws IOException;
-
-    /**
-     * Returns a NIO representation of the data.
-     *
-     * @return A channel to the asset data.
-     * @throws IOException
-     */
-    ReadableByteChannel getReadChannel() throws IOException;
-
-    /**
-     *
-     * Returns an output stream where you can write data to the asset. The operation is not locked or synchronized.
-     * User of this method have to make sure, that the stream is proper closed after usage.
-     *
-     * @param replace If true, the original data will be replaced, otherwise the data will be appended.
-     * @return The OutputStream where the data can be written.
-     * @throws IOException
-     */
-    OutputStream getWriteStream( boolean replace) throws IOException;
-
-    /**
-     * Returns a NIO representation of the asset where you can write the data.
-     *
-     * @param replace True, if the content should be replaced by the data written to the stream.
-     * @return The Channel for writing the data.
-     * @throws IOException
-     */
-    WritableByteChannel getWriteChannel( boolean replace) throws IOException;
-
-    /**
-     * Replaces the content. The implementation may do an atomic move operation, or keep a backup. If
-     * the operation fails, the implementation should try to restore the old data, if possible.
-     *
-     * The original file may be deleted, if the storage was successful.
-     *
-     * @param newData Replaces the data by the content of the given file.
-     */
-    boolean replaceDataFromFile( Path newData) throws IOException;
-
-    /**
-     * Returns true, if the asset exists.
-     *
-     * @return True, if the asset exists, otherwise false.
-     */
-    boolean exists();
-
-    /**
-     * Creates the asset in the underlying storage, if it does not exist.
-     */
-    void create() throws IOException;
-
-    /**
-     * Returns the real path to the asset, if it exist. Not all implementations may implement this method.
-     * The method throws {@link UnsupportedOperationException}, if and only if {@link #isFileBased()} returns false.
-     *
-     * @return The filesystem path to the asset.
-     * @throws UnsupportedOperationException If the underlying storage is not file based.
-     */
-    Path getFilePath() throws UnsupportedOperationException;
-
-    /**
-     * Returns true, if the asset can return a file path for the given asset. If this is true, the  {@link #getFilePath()}
-     * will not throw a {@link UnsupportedOperationException}
-     *
-     * @return
-     */
-    boolean isFileBased();
-
-    /**
-     * Returns true, if there is a parent to this asset.
-     * @return
-     */
-    boolean hasParent();
-
-    /**
-     * Returns the parent of this asset.
-     * @return The asset, or <code>null</code>, if it does not exist.
-     */
-    StorageAsset getParent();
-}
index 636e2cb404f43045c22f7ec5c66e2368a58e6a58..ff10b2d39af14847de9a0221fe2786f31c96ce67 100644 (file)
@@ -22,12 +22,11 @@ package org.apache.archiva.repository.features;
 
 import org.apache.archiva.repository.Repository;
 import org.apache.archiva.repository.RepositoryEventListener;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.nio.file.Path;
 
 import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_INDEX_PATH;
 import static org.apache.archiva.indexer.ArchivaIndexManager.DEFAULT_PACKED_INDEX_PATH;
index 7d2c1a667addaedc4af985e26c0735ce5bc61fad..3e9335650490c3c79549f56c92c5f9a09fc1beed 100644 (file)
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-model</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-storage-fs</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-checksum</artifactId>
index 293b73b71c6b8f065b757429008491d229f73ae2..b04005fa666f8deaa51fe82737bf0a4cd7f985d5 100644 (file)
@@ -28,6 +28,8 @@ import org.apache.archiva.indexer.merger.IndexMergerRequest;
 import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
 import org.apache.archiva.repository.Repository;
 import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.commons.lang.time.StopWatch;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -87,7 +89,7 @@ public class DefaultIndexMerger
             stopWatch.reset();
             stopWatch.start();
 
-            Path mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
+            StorageAsset mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
             Repository destinationRepository = repositoryRegistry.getRepository(indexMergerRequest.getGroupId());
 
             ArchivaIndexManager idxManager = repositoryRegistry.getIndexManager(destinationRepository.getType());
@@ -131,10 +133,10 @@ public class DefaultIndexMerger
                 ctx.close(true);
                 temporaryGroupIndexes.remove( temporaryGroupIndex );
                 temporaryContextes.remove( ctx );
-                Path directory = temporaryGroupIndex.getDirectory();
-                if ( directory != null && Files.exists(directory) )
+                StorageAsset directory = temporaryGroupIndex.getDirectory();
+                if ( directory != null && directory.exists() )
                 {
-                    FileUtils.deleteDirectory( directory );
+                    StorageUtil.deleteRecursively( directory );
                 }
             }
         }
index b2be61184893b2ebb44d5282a315c9edf24af4ce..93dbaeaade9ded51271dc6e7ab07e190aab4b578 100644 (file)
@@ -22,6 +22,7 @@ package org.apache.archiva.indexer.merger;
 import org.apache.archiva.repository.ManagedRepository;
 import org.apache.archiva.repository.RepositoryGroup;
 import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -59,7 +60,7 @@ public class DefaultMergedRemoteIndexesScheduler
     private Map<String, ScheduledFuture> scheduledFutureMap = new ConcurrentHashMap<>();
 
     @Override
-    public void schedule(RepositoryGroup repositoryGroup, Path directory )
+    public void schedule(RepositoryGroup repositoryGroup, StorageAsset directory )
     {
         if ( StringUtils.isEmpty( repositoryGroup.getSchedulingDefinition() ) )
         {
index 41d703ab2477b0887ee0ef6bdb3b8dade0f67a4d..45d95ade35376016e7a37908ddab7482f51b6724 100644 (file)
@@ -20,17 +20,12 @@ package org.apache.archiva.repository;
  */
 
 
-import org.apache.archiva.repository.content.RepositoryStorage;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.RepositoryStorage;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Path;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Locale;
 import java.util.Set;
-import java.util.function.Consumer;
 
 /**
  * Simple implementation of a managed repository.
index ff2ac062ac75510fa71a8c78530a6b7badf4ac98..048ad227abf2d831771427c38db14e5e47ac9704 100644 (file)
@@ -20,7 +20,7 @@ package org.apache.archiva.repository;
  */
 
 
-import org.apache.archiva.repository.content.RepositoryStorage;
+import org.apache.archiva.repository.storage.RepositoryStorage;
 
 import java.nio.file.Path;
 import java.time.Duration;
index 87b5420b1b44f800acbd6e31874cc257d23b556c..c9fc9266022e0a631f5ff7c8aee016ee52bc8fac 100644 (file)
@@ -23,10 +23,9 @@ import com.cronutils.model.CronType;
 import com.cronutils.model.definition.CronDefinition;
 import com.cronutils.model.definition.CronDefinitionBuilder;
 import com.cronutils.parser.CronParser;
-import org.apache.archiva.common.utils.PathUtil;
 import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.repository.content.RepositoryStorage;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.repository.features.RepositoryFeature;
 import org.apache.archiva.repository.features.StagingRepositoryFeature;
 import org.apache.commons.lang.StringUtils;
index 2011d4a5ce0ba40b08b08825eb27b351dd6a2d52..2f78b7bf52f165550eb05e66568d914d4d732603 100644 (file)
@@ -19,18 +19,13 @@ package org.apache.archiva.repository;
  * under the License.
  */
 
-import org.apache.archiva.repository.content.RepositoryStorage;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.RepositoryStorage;
 import org.apache.commons.collections4.map.ListOrderedMap;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Path;
 import java.util.List;
 import java.util.Locale;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.function.Consumer;
 
 /**
  * Abstract repository group implementation.
index a16630b4a82eb65331554469315b54ef463c1c48..8f65cb4d3b365e03ec329efd0bd8910e843807ce 100644 (file)
@@ -21,9 +21,8 @@ package org.apache.archiva.repository;
 
 import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.repository.content.FilesystemStorage;
-import org.apache.archiva.repository.content.RepositoryStorage;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.RepositoryStorage;
 import org.apache.archiva.repository.features.ArtifactCleanupFeature;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.StagingRepositoryFeature;
@@ -31,10 +30,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.io.InputStream;
 import java.nio.file.Path;
 import java.util.Locale;
-import java.util.function.Consumer;
 
 /**
  *
index acca83a76717f963ae5cb11a35aec211760d5ff8..0675402a05e743a5ea0c523a38c5ac46bfa9abca 100644 (file)
@@ -21,8 +21,8 @@ package org.apache.archiva.repository;
 
 import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.repository.content.FilesystemStorage;
-import org.apache.archiva.repository.content.RepositoryStorage;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.RepositoryStorage;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.RemoteIndexFeature;
 import org.slf4j.Logger;
index e78130e33275a85d3eb90d3096c099a40a84af64..e0e61c54a316cdbe276ce10cb56489ff5ea92755 100644 (file)
@@ -26,8 +26,6 @@ import org.apache.archiva.indexer.IndexCreationFailedException;
 import org.apache.archiva.indexer.IndexManagerFactory;
 import org.apache.archiva.indexer.IndexUpdateFailedException;
 import org.apache.archiva.redback.components.registry.RegistryException;
-import org.apache.archiva.repository.content.RepositoryStorage;
-import org.apache.archiva.repository.content.StorageAsset;
 import org.apache.archiva.repository.features.IndexCreationEvent;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.StagingRepositoryFeature;
@@ -44,11 +42,9 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
index 7755b53d760e52067f327546038e9d58a18b9297..8bcfa97d6391ccbda952a632002659c53cd5e49d 100644 (file)
@@ -23,6 +23,7 @@ import org.apache.archiva.repository.ManagedRepository;
 import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.RepositoryContentFactory;
 import org.apache.archiva.repository.RepositoryException;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.springframework.stereotype.Service;
 
 import javax.inject.Inject;
@@ -53,4 +54,19 @@ public class ArtifactUtil {
         return Paths.get(repository.getLocation()).resolve(artifactPath);
     }
 
+    /**
+     * Returns the physical location of a given artifact in the repository. There is no check for the
+     * existence of the returned file.
+     *
+     * @param repository The repository, where the artifact is stored.
+     * @param artifactReference The artifact reference.
+     * @return The asset representation of the artifact.
+     * @throws RepositoryException
+     */
+    public StorageAsset getArtifactAsset(ManagedRepository repository, ArtifactReference artifactReference) throws RepositoryException {
+        final ManagedRepositoryContent content = repositoryContentFactory.getManagedRepositoryContent(repository);
+        final String artifactPath = content.toPath( artifactReference );
+        return repository.getAsset(artifactPath);
+    }
+
 }
diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemAsset.java
deleted file mode 100644 (file)
index e0d10f8..0000000
+++ /dev/null
@@ -1,481 +0,0 @@
-package org.apache.archiva.repository.content;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.channels.FileChannel;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-import java.nio.file.*;
-import java.nio.file.attribute.*;
-import java.time.Instant;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-/**
- * Implementation of an asset that is stored on the filesystem.
- * <p>
- * The implementation does not check the given paths. Caller should normalize the asset path
- * and check, if the base path is a parent of the resulting path.
- * <p>
- * The file must not exist for all operations.
- *
- * @author Martin Stockhammer <martin_s@apache.org>
- */
-public class FilesystemAsset implements StorageAsset {
-
-    private final static Logger log = LoggerFactory.getLogger(FilesystemAsset.class);
-
-    private final Path basePath;
-    private final Path assetPath;
-    private final String relativePath;
-
-    public static final String DEFAULT_POSIX_FILE_PERMS = "rw-rw----";
-    public static final String DEFAULT_POSIX_DIR_PERMS = "rwxrwx---";
-
-    public static final Set<PosixFilePermission> DEFAULT_POSIX_FILE_PERMISSIONS;
-    public static final Set<PosixFilePermission> DEFAULT_POSIX_DIR_PERMISSIONS;
-
-    public static final AclEntryPermission[] DEFAULT_ACL_FILE_PERMISSIONS = new AclEntryPermission[]{
-            AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL,
-            AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA
-    };
-
-    public static final AclEntryPermission[] DEFAULT_ACL_DIR_PERMISSIONS = new AclEntryPermission[]{
-            AclEntryPermission.ADD_FILE, AclEntryPermission.ADD_SUBDIRECTORY, AclEntryPermission.DELETE_CHILD,
-            AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL,
-            AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA
-    };
-
-    static {
-
-        DEFAULT_POSIX_FILE_PERMISSIONS = PosixFilePermissions.fromString(DEFAULT_POSIX_FILE_PERMS);
-        DEFAULT_POSIX_DIR_PERMISSIONS = PosixFilePermissions.fromString(DEFAULT_POSIX_DIR_PERMS);
-    }
-
-    Set<PosixFilePermission> defaultPosixFilePermissions = DEFAULT_POSIX_FILE_PERMISSIONS;
-    Set<PosixFilePermission> defaultPosixDirectoryPermissions = DEFAULT_POSIX_DIR_PERMISSIONS;
-
-    List<AclEntry> defaultFileAcls;
-    List<AclEntry> defaultDirectoryAcls;
-
-    boolean supportsAcl = false;
-    boolean supportsPosix = false;
-    final boolean setPermissionsForNew;
-    final RepositoryStorage storage;
-
-    boolean directoryHint = false;
-
-    private static final OpenOption[] REPLACE_OPTIONS = new OpenOption[]{StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE};
-    private static final OpenOption[] APPEND_OPTIONS = new OpenOption[]{StandardOpenOption.APPEND};
-
-
-    FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath) {
-        this.assetPath = assetPath;
-        this.relativePath = path;
-        this.setPermissionsForNew=false;
-        this.basePath = basePath;
-        this.storage = storage;
-        init();
-    }
-
-    /**
-     * Creates an asset for the given path. The given paths are not checked.
-     * The base path should be an absolute path.
-     *
-     * @param path The logical path for the asset relative to the repository.
-     * @param assetPath The asset path.
-     */
-    public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath) {
-        this.assetPath = assetPath;
-        this.relativePath = path;
-        this.setPermissionsForNew = false;
-        this.basePath = null;
-        this.storage = storage;
-        init();
-    }
-
-    /**
-     * Creates an asset for the given path. The given paths are not checked.
-     * The base path should be an absolute path.
-     *
-     * @param path The logical path for the asset relative to the repository
-     * @param assetPath The asset path.
-     * @param directory This is only relevant, if the represented file or directory does not exist yet and
-     *                  is a hint.
-     */
-    public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory) {
-        this.assetPath = assetPath;
-        this.relativePath = path;
-        this.directoryHint = directory;
-        this.setPermissionsForNew = false;
-        this.basePath = basePath;
-        this.storage = storage;
-        init();
-    }
-
-    /**
-     * Creates an asset for the given path. The given paths are not checked.
-     * The base path should be an absolute path.
-     *
-     * @param path The logical path for the asset relative to the repository
-     * @param assetPath The asset path.
-     * @param directory This is only relevant, if the represented file or directory does not exist yet and
-     *                  is a hint.
-     */
-    public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) {
-        this.assetPath = assetPath;
-        this.relativePath = path;
-        this.directoryHint = directory;
-        this.setPermissionsForNew = setPermissionsForNew;
-        this.basePath = basePath;
-        this.storage = storage;
-        init();
-    }
-
-    private void init() {
-
-        if (setPermissionsForNew) {
-            try {
-                supportsAcl = Files.getFileStore(assetPath.getRoot()).supportsFileAttributeView(AclFileAttributeView.class);
-            } catch (IOException e) {
-                log.error("Could not check filesystem capabilities {}", e.getMessage());
-            }
-            try {
-                supportsPosix = Files.getFileStore(assetPath.getRoot()).supportsFileAttributeView(PosixFileAttributeView.class);
-            } catch (IOException e) {
-                log.error("Could not check filesystem capabilities {}", e.getMessage());
-            }
-
-            if (supportsAcl) {
-                AclFileAttributeView aclView = Files.getFileAttributeView(assetPath.getParent(), AclFileAttributeView.class);
-                UserPrincipal owner = null;
-                try {
-                    owner = aclView.getOwner();
-                    setDefaultFileAcls(processPermissions(owner, DEFAULT_ACL_FILE_PERMISSIONS));
-                    setDefaultDirectoryAcls(processPermissions(owner, DEFAULT_ACL_DIR_PERMISSIONS));
-
-                } catch (IOException e) {
-                    supportsAcl = false;
-                }
-
-
-            }
-        }
-    }
-
-    private List<AclEntry> processPermissions(UserPrincipal owner, AclEntryPermission[] defaultAclFilePermissions) {
-        AclEntry.Builder aclBuilder = AclEntry.newBuilder();
-        aclBuilder.setPermissions(defaultAclFilePermissions);
-        aclBuilder.setType(AclEntryType.ALLOW);
-        aclBuilder.setPrincipal(owner);
-        ArrayList<AclEntry> aclList = new ArrayList<>();
-        aclList.add(aclBuilder.build());
-        return aclList;
-    }
-
-
-    @Override
-    public RepositoryStorage getStorage( )
-    {
-        return storage;
-    }
-
-    @Override
-    public String getPath() {
-        return relativePath;
-    }
-
-    @Override
-    public String getName() {
-        return assetPath.getFileName().toString();
-    }
-
-    @Override
-    public Instant getModificationTime() {
-        try {
-            return Files.getLastModifiedTime(assetPath).toInstant();
-        } catch (IOException e) {
-            log.error("Could not read modification time of {}", assetPath);
-            return Instant.now();
-        }
-    }
-
-    /**
-     * Returns true, if the path of this asset points to a directory
-     *
-     * @return
-     */
-    @Override
-    public boolean isContainer() {
-        if (Files.exists(assetPath)) {
-            return Files.isDirectory(assetPath);
-        } else {
-            return directoryHint;
-        }
-    }
-
-    /**
-     * Returns the list of directory entries, if this asset represents a directory.
-     * Otherwise a empty list will be returned.
-     *
-     * @return The list of entries in the directory, if it exists.
-     */
-    @Override
-    public List<StorageAsset> list() {
-        try {
-            return Files.list(assetPath).map(p -> new FilesystemAsset(storage, relativePath + "/" + p.getFileName().toString(), assetPath.resolve(p)))
-                    .collect(Collectors.toList());
-        } catch (IOException e) {
-            return Collections.EMPTY_LIST;
-        }
-    }
-
-    /**
-     * Returns the size of the represented file. If it cannot be determined, -1 is returned.
-     *
-     * @return
-     */
-    @Override
-    public long getSize() {
-        try {
-            return Files.size(assetPath);
-        } catch (IOException e) {
-            return -1;
-        }
-    }
-
-    /**
-     * Returns a input stream to the underlying file, if it exists. The caller has to make sure, that
-     * the stream is closed after it was used.
-     *
-     * @return
-     * @throws IOException
-     */
-    @Override
-    public InputStream getReadStream() throws IOException {
-        if (isContainer()) {
-            throw new IOException("Can not create input stream for container");
-        }
-        return Files.newInputStream(assetPath);
-    }
-
-    @Override
-    public ReadableByteChannel getReadChannel( ) throws IOException
-    {
-        return FileChannel.open( assetPath, StandardOpenOption.READ );
-    }
-
-    private OpenOption[] getOpenOptions(boolean replace) {
-        return replace ? REPLACE_OPTIONS : APPEND_OPTIONS;
-    }
-
-    @Override
-    public OutputStream getWriteStream( boolean replace) throws IOException {
-        OpenOption[] options = getOpenOptions( replace );
-        if (!Files.exists( assetPath )) {
-            create();
-        }
-        return Files.newOutputStream(assetPath, options);
-    }
-
-    @Override
-    public WritableByteChannel getWriteChannel( boolean replace ) throws IOException
-    {
-        OpenOption[] options = getOpenOptions( replace );
-        return FileChannel.open( assetPath, options );
-    }
-
-    @Override
-    public boolean replaceDataFromFile( Path newData) throws IOException {
-        final boolean createNew = !Files.exists(assetPath);
-        Path backup = null;
-        if (!createNew) {
-            backup = findBackupFile(assetPath);
-        }
-        try {
-            if (!createNew) {
-                Files.move(assetPath, backup);
-            }
-            Files.move(newData, assetPath, StandardCopyOption.REPLACE_EXISTING);
-            applyDefaultPermissions(assetPath);
-            return true;
-        } catch (IOException e) {
-            log.error("Could not overwrite file {}", assetPath);
-            // Revert if possible
-            if (backup != null && Files.exists(backup)) {
-                Files.move(backup, assetPath, StandardCopyOption.REPLACE_EXISTING);
-            }
-            throw e;
-        } finally {
-            if (backup != null) {
-                try {
-                    Files.deleteIfExists(backup);
-                } catch (IOException e) {
-                    log.error("Could not delete backup file {}", backup);
-                }
-            }
-        }
-
-    }
-
-    private void applyDefaultPermissions(Path filePath) {
-        try {
-            if (supportsPosix) {
-                Set<PosixFilePermission> perms;
-                if (Files.isDirectory(filePath)) {
-                    perms = defaultPosixFilePermissions;
-                } else {
-                    perms = defaultPosixDirectoryPermissions;
-                }
-                Files.setPosixFilePermissions(filePath, perms);
-            } else if (supportsAcl) {
-                List<AclEntry> perms;
-                if (Files.isDirectory(filePath)) {
-                    perms = getDefaultDirectoryAcls();
-                } else {
-                    perms = getDefaultFileAcls();
-                }
-                AclFileAttributeView aclAttr = Files.getFileAttributeView(filePath, AclFileAttributeView.class);
-                aclAttr.setAcl(perms);
-            }
-        } catch (IOException e) {
-            log.error("Could not set permissions for {}: {}", filePath, e.getMessage());
-        }
-    }
-
-    private Path findBackupFile(Path file) {
-        String ext = ".bak";
-        Path backupPath = file.getParent().resolve(file.getFileName().toString() + ext);
-        int idx = 0;
-        while (Files.exists(backupPath)) {
-            backupPath = file.getParent().resolve(file.getFileName().toString() + ext + idx++);
-        }
-        return backupPath;
-    }
-
-    @Override
-    public boolean exists() {
-        return Files.exists(assetPath);
-    }
-
-    @Override
-    public Path getFilePath() throws UnsupportedOperationException {
-        return assetPath;
-    }
-
-    @Override
-    public boolean isFileBased( )
-    {
-        return true;
-    }
-
-    @Override
-    public boolean hasParent( )
-    {
-        if (basePath!=null && assetPath.equals(basePath)) {
-                return false;
-        }
-        return assetPath.getParent()!=null;
-    }
-
-    @Override
-    public StorageAsset getParent( )
-    {
-        Path parentPath;
-        if (basePath!=null && assetPath.equals( basePath )) {
-            parentPath=null;
-        } else
-        {
-            parentPath = assetPath.getParent( );
-        }
-        String relativeParent = StringUtils.substringBeforeLast( relativePath,"/");
-        if (parentPath!=null) {
-            return new FilesystemAsset(storage, relativeParent, parentPath, basePath, true, setPermissionsForNew );
-        } else {
-            return null;
-        }
-    }
-
-
-    public void setDefaultFileAcls(List<AclEntry> acl) {
-        defaultFileAcls = acl;
-    }
-
-    public List<AclEntry> getDefaultFileAcls() {
-        return defaultFileAcls;
-    }
-
-    public void setDefaultPosixFilePermissions(Set<PosixFilePermission> perms) {
-        defaultPosixFilePermissions = perms;
-    }
-
-    public Set<PosixFilePermission> getDefaultPosixFilePermissions() {
-        return defaultPosixFilePermissions;
-    }
-
-    public void setDefaultDirectoryAcls(List<AclEntry> acl) {
-        defaultDirectoryAcls = acl;
-    }
-
-    public List<AclEntry> getDefaultDirectoryAcls() {
-        return defaultDirectoryAcls;
-    }
-
-    public void setDefaultPosixDirectoryPermissions(Set<PosixFilePermission> perms) {
-        defaultPosixDirectoryPermissions = perms;
-    }
-
-    public Set<PosixFilePermission> getDefaultPosixDirectoryPermissions() {
-        return defaultPosixDirectoryPermissions;
-    }
-
-    @Override
-    public void create() throws IOException {
-        if (!Files.exists(assetPath)) {
-            if (directoryHint) {
-                Files.createDirectories(assetPath);
-            } else {
-                if (!Files.exists( assetPath.getParent() )) {
-                    Files.createDirectories( assetPath.getParent( ) );
-                }
-                Files.createFile(assetPath);
-            }
-            if (setPermissionsForNew) {
-                applyDefaultPermissions(assetPath);
-            }
-        }
-    }
-
-    @Override
-    public String toString() {
-        return relativePath+":"+assetPath;
-    }
-
-}
diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemStorage.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/FilesystemStorage.java
deleted file mode 100644 (file)
index 92044fa..0000000
+++ /dev/null
@@ -1,376 +0,0 @@
-package org.apache.archiva.repository.content;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.filelock.FileLockException;
-import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.common.filelock.FileLockTimeoutException;
-import org.apache.archiva.common.filelock.Lock;
-import org.apache.commons.io.FileUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.channels.FileChannel;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-import java.nio.file.CopyOption;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
-import java.nio.file.StandardOpenOption;
-import java.util.function.Consumer;
-
-/**
- * Implementation of <code>{@link RepositoryStorage}</code> where data is stored in the filesystem.
- *
- * All files are relative to a given base path. Path values are separated by '/', '..' is allowed to navigate
- * to a parent directory, but navigation out of the base path will lead to a exception.
- */
-public class FilesystemStorage implements RepositoryStorage {
-
-    private static final Logger log = LoggerFactory.getLogger(FilesystemStorage.class);
-
-    private final Path basePath;
-    private final FileLockManager fileLockManager;
-
-    public FilesystemStorage(Path basePath, FileLockManager fileLockManager) throws IOException {
-        if (!Files.exists(basePath)) {
-            Files.createDirectories(basePath);
-        }
-        this.basePath = basePath.normalize().toRealPath();
-        this.fileLockManager = fileLockManager;
-    }
-
-    private Path normalize(final String path) {
-        String nPath = path;
-        while (nPath.startsWith("/")) {
-            nPath = nPath.substring(1);
-        }
-        return Paths.get(nPath);
-    }
-
-    private Path getAssetPath(String path) throws IOException {
-        Path assetPath = basePath.resolve(normalize(path)).normalize();
-        if (!assetPath.startsWith(basePath))
-        {
-            throw new IOException("Path navigation out of allowed scope: "+path);
-        }
-        return assetPath;
-    }
-
-    @Override
-    public void consumeData( StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
-    {
-        final Path path = asset.getFilePath();
-        try {
-            if (readLock) {
-                consumeDataLocked( path, consumerFunction );
-            } else
-            {
-                try ( InputStream is = Files.newInputStream( path ) )
-                {
-                    consumerFunction.accept( is );
-                }
-                catch ( IOException e )
-                {
-                    log.error("Could not read the input stream from file {}", path);
-                    throw e;
-                }
-            }
-        } catch (RuntimeException e)
-        {
-            log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
-            throw new IOException( e );
-        }
-
-    }
-
-    @Override
-    public void consumeDataFromChannel( StorageAsset asset, Consumer<ReadableByteChannel> consumerFunction, boolean readLock ) throws IOException
-    {
-        final Path path = asset.getFilePath();
-        try {
-            if (readLock) {
-                consumeDataFromChannelLocked( path, consumerFunction );
-            } else
-            {
-                try ( FileChannel is = FileChannel.open( path, StandardOpenOption.READ ) )
-                {
-                    consumerFunction.accept( is );
-                }
-                catch ( IOException e )
-                {
-                    log.error("Could not read the input stream from file {}", path);
-                    throw e;
-                }
-            }
-        } catch (RuntimeException e)
-        {
-            log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
-            throw new IOException( e );
-        }
-    }
-
-    @Override
-    public void writeData( StorageAsset asset, Consumer<OutputStream> consumerFunction, boolean writeLock ) throws IOException
-    {
-        final Path path = asset.getFilePath();
-        try {
-            if (writeLock) {
-                writeDataLocked( path, consumerFunction );
-            } else
-            {
-                try ( OutputStream is = Files.newOutputStream( path ) )
-                {
-                    consumerFunction.accept( is );
-                }
-                catch ( IOException e )
-                {
-                    log.error("Could not write the output stream to file {}", path);
-                    throw e;
-                }
-            }
-        } catch (RuntimeException e)
-        {
-            log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
-            throw new IOException( e );
-        }
-
-    }
-
-    @Override
-    public void writeDataToChannel( StorageAsset asset, Consumer<WritableByteChannel> consumerFunction, boolean writeLock ) throws IOException
-    {
-        final Path path = asset.getFilePath();
-        try {
-            if (writeLock) {
-                writeDataToChannelLocked( path, consumerFunction );
-            } else
-            {
-                try ( FileChannel os = FileChannel.open( path, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE ))
-                {
-                    consumerFunction.accept( os );
-                }
-                catch ( IOException e )
-                {
-                    log.error("Could not write the data to file {}", path);
-                    throw e;
-                }
-            }
-        } catch (RuntimeException e)
-        {
-            log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
-            throw new IOException( e );
-        }
-    }
-
-    private void consumeDataLocked( Path file, Consumer<InputStream> consumerFunction) throws IOException
-    {
-
-        final Lock lock;
-        try
-        {
-            lock = fileLockManager.readFileLock( file );
-            try ( InputStream is = Files.newInputStream( lock.getFile()))
-            {
-                consumerFunction.accept( is );
-            }
-            catch ( IOException e )
-            {
-                log.error("Could not read the input stream from file {}", file);
-                throw e;
-            } finally
-            {
-                fileLockManager.release( lock );
-            }
-        }
-        catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
-        {
-            log.error("Locking error on file {}", file);
-            throw new IOException(e);
-        }
-    }
-
-    private void consumeDataFromChannelLocked( Path file, Consumer<ReadableByteChannel> consumerFunction) throws IOException
-    {
-
-        final Lock lock;
-        try
-        {
-            lock = fileLockManager.readFileLock( file );
-            try ( FileChannel is = FileChannel.open( lock.getFile( ), StandardOpenOption.READ ))
-            {
-                consumerFunction.accept( is );
-            }
-            catch ( IOException e )
-            {
-                log.error("Could not read the input stream from file {}", file);
-                throw e;
-            } finally
-            {
-                fileLockManager.release( lock );
-            }
-        }
-        catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
-        {
-            log.error("Locking error on file {}", file);
-            throw new IOException(e);
-        }
-    }
-
-
-    private void writeDataLocked( Path file, Consumer<OutputStream> consumerFunction) throws IOException
-    {
-
-        final Lock lock;
-        try
-        {
-            lock = fileLockManager.writeFileLock( file );
-            try ( OutputStream is = Files.newOutputStream( lock.getFile()))
-            {
-                consumerFunction.accept( is );
-            }
-            catch ( IOException e )
-            {
-                log.error("Could not write the output stream to file {}", file);
-                throw e;
-            } finally
-            {
-                fileLockManager.release( lock );
-            }
-        }
-        catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
-        {
-            log.error("Locking error on file {}", file);
-            throw new IOException(e);
-        }
-    }
-
-    private void writeDataToChannelLocked( Path file, Consumer<WritableByteChannel> consumerFunction) throws IOException
-    {
-
-        final Lock lock;
-        try
-        {
-            lock = fileLockManager.writeFileLock( file );
-            try ( FileChannel is = FileChannel.open( lock.getFile( ), StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE ))
-            {
-                consumerFunction.accept( is );
-            }
-            catch ( IOException e )
-            {
-                log.error("Could not write to file {}", file);
-                throw e;
-            } finally
-            {
-                fileLockManager.release( lock );
-            }
-        }
-        catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
-        {
-            log.error("Locking error on file {}", file);
-            throw new IOException(e);
-        }
-    }
-
-    @Override
-    public StorageAsset getAsset( String path )
-    {
-        try {
-            return new FilesystemAsset(this, path, getAssetPath(path));
-        } catch (IOException e) {
-            throw new IllegalArgumentException("Path navigates outside of base directory "+path);
-        }
-    }
-
-    @Override
-    public StorageAsset addAsset( String path, boolean container )
-    {
-        try {
-            return new FilesystemAsset(this, path, getAssetPath(path), basePath, container);
-        } catch (IOException e) {
-            throw new IllegalArgumentException("Path navigates outside of base directory "+path);
-        }
-    }
-
-    @Override
-    public void removeAsset( StorageAsset asset ) throws IOException
-    {
-        Files.delete(asset.getFilePath());
-    }
-
-    @Override
-    public StorageAsset moveAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException
-    {
-        boolean container = origin.isContainer();
-        FilesystemAsset newAsset = new FilesystemAsset(this, destination, getAssetPath(destination), basePath, container );
-        moveAsset( origin, newAsset, copyOptions );
-        return newAsset;
-    }
-
-    @Override
-    public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
-    {
-        Files.move(origin.getFilePath(), destination.getFilePath(), copyOptions);
-    }
-
-    @Override
-    public StorageAsset copyAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException
-    {
-        boolean container = origin.isContainer();
-        FilesystemAsset newAsset = new FilesystemAsset(this, destination, getAssetPath(destination), basePath, container );
-        copyAsset( origin, newAsset, copyOptions );
-        return newAsset;
-    }
-
-    @Override
-    public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
-    {
-        Path destinationPath = destination.getFilePath();
-        boolean overwrite = false;
-        for (int i=0; i<copyOptions.length; i++) {
-            if (copyOptions[i].equals( StandardCopyOption.REPLACE_EXISTING )) {
-                overwrite=true;
-            }
-        }
-        if (Files.exists(destinationPath) && !overwrite) {
-            throw new IOException("Destination file exists already "+ destinationPath);
-        }
-        if (Files.isDirectory( origin.getFilePath() ))
-        {
-            FileUtils.copyDirectory(origin.getFilePath( ).toFile(), destinationPath.toFile() );
-        } else if (Files.isRegularFile( origin.getFilePath() )) {
-            if (!Files.exists( destinationPath )) {
-                Files.createDirectories( destinationPath );
-            }
-            Files.copy( origin.getFilePath( ), destinationPath, copyOptions );
-        }
-    }
-
-    public FileLockManager getFileLockManager() {
-        return fileLockManager;
-    }
-
-}
diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/StorageUtil.java b/archiva-modules/archiva-base/archiva-repository-layer/src/main/java/org/apache/archiva/repository/content/StorageUtil.java
deleted file mode 100644 (file)
index 4b8d11b..0000000
+++ /dev/null
@@ -1,192 +0,0 @@
-package org.apache.archiva.repository.content;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.filelock.FileLockException;
-import org.apache.archiva.common.filelock.FileLockManager;
-import org.apache.archiva.common.filelock.FileLockTimeoutException;
-import org.apache.archiva.common.filelock.Lock;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-import java.nio.file.CopyOption;
-import java.nio.file.Files;
-import java.nio.file.Path;
-
-/**
- * @author Martin Stockhammer <martin_s@apache.org>
- */
-public class StorageUtil
-{
-    private static final int DEFAULT_BUFFER_SIZE = 4096;
-
-    /**
-     * Copies the source asset to the target. The assets may be from different RepositoryStorage instances.
-     *
-     * @param source The source asset
-     * @param target The target asset
-     * @param locked If true, a readlock is set on the source and a write lock is set on the target.
-     * @param copyOptions Copy options
-     * @throws IOException
-     */
-    public static final void copyAsset( final StorageAsset source,
-                                        final StorageAsset target,
-                                        boolean locked,
-                                        final CopyOption... copyOptions ) throws IOException
-    {
-        if (source.isFileBased() && target.isFileBased()) {
-            // Short cut for FS operations
-            final Path sourcePath = source.getFilePath();
-            final Path targetPath = target.getFilePath( );
-            if (locked) {
-                final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager();
-                final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager();
-                try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) )
-                {
-                    Files.copy( sourcePath, targetPath, copyOptions );
-                }
-                catch ( FileLockException e )
-                {
-                    throw new IOException( e );
-                }
-                catch ( FileLockTimeoutException e )
-                {
-                    throw new IOException( e );
-                }
-            } else
-            {
-                Files.copy( sourcePath, targetPath, copyOptions );
-            }
-        } else {
-            try {
-                final RepositoryStorage sourceStorage = source.getStorage();
-                final RepositoryStorage targetStorage = target.getStorage();
-                sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
-            }  catch (IOException e) {
-                throw e;
-            }  catch (Throwable e) {
-                Throwable cause = e.getCause();
-                if (cause instanceof IOException) {
-                    throw (IOException)cause;
-                } else
-                {
-                    throw new IOException( e );
-                }
-            }
-        }
-    }
-
-    /**
-     *
-     * @param source
-     * @param target
-     * @param locked
-     * @param copyOptions
-     * @throws IOException
-     */
-    public static void moveAsset(StorageAsset source, StorageAsset target, boolean locked, CopyOption... copyOptions) throws IOException
-    {
-        if (source.isFileBased() && target.isFileBased()) {
-            // Short cut for FS operations
-            // Move is atomic operation
-            Files.move( source.getFilePath(), target.getFilePath(), copyOptions );
-        } else {
-            try {
-                final RepositoryStorage sourceStorage = source.getStorage();
-                final RepositoryStorage targetStorage = target.getStorage();
-                sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
-                sourceStorage.removeAsset( source );
-            }  catch (IOException e) {
-                throw e;
-            }  catch (Throwable e) {
-                Throwable cause = e.getCause();
-                if (cause instanceof IOException) {
-                    throw (IOException)cause;
-                } else
-                {
-                    throw new IOException( e );
-                }
-            }
-        }
-
-    }
-
-    private static void wrapWriteFunction(ReadableByteChannel is, RepositoryStorage targetStorage, StorageAsset target, boolean locked) {
-        try {
-            targetStorage.writeDataToChannel( target, os -> copy(is, os), locked );
-        } catch (Exception e) {
-            throw new RuntimeException( e );
-        }
-    }
-
-
-    private static void copy( final ReadableByteChannel is, final WritableByteChannel os ) {
-        if (is instanceof FileChannel) {
-            copy( (FileChannel) is, os );
-        } else if (os instanceof FileChannel) {
-            copy(is, (FileChannel)os);
-        } else
-        {
-            try
-            {
-                ByteBuffer buffer = ByteBuffer.allocate( DEFAULT_BUFFER_SIZE );
-                while ( is.read( buffer ) != -1 )
-                {
-                    buffer.flip( );
-                    while ( buffer.hasRemaining( ) )
-                    {
-                        os.write( buffer );
-                    }
-                    buffer.clear( );
-                }
-            }
-            catch ( IOException e )
-            {
-                throw new RuntimeException( e );
-            }
-        }
-    }
-
-    private static void copy( final FileChannel is, final WritableByteChannel os ) {
-        try
-        {
-            is.transferTo( 0, is.size( ), os );
-        }
-        catch ( IOException e )
-        {
-            throw new RuntimeException( e );
-        }
-    }
-
-    private static void copy( final ReadableByteChannel is, final FileChannel os ) {
-        try
-        {
-            os.transferFrom( is, 0, Long.MAX_VALUE );
-        }
-        catch ( IOException e )
-        {
-            throw new RuntimeException( e );
-        }
-    }
-
-}
index 9df3f49c60169620447d85cc5311ec09c979629a..57cc2c33106369961155b07a14b2dca8b8e573c5 100644 (file)
@@ -42,6 +42,7 @@ import org.apache.archiva.repository.ContentNotFoundException;
 import org.apache.archiva.repository.LayoutException;
 import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.RemoteRepositoryContent;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.xml.XMLException;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang.StringUtils;
@@ -369,9 +370,9 @@ public class MetadataTools
                                                         ProjectReference reference, String proxyId )
     {
         String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
-        Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
+        StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
 
-        if ( !Files.exists(metadataFile) || !Files.isRegularFile( metadataFile ))
+        if ( !metadataFile.exists() || metadataFile.isContainer())
         {
             // Nothing to do. return null.
             return null;
@@ -381,11 +382,11 @@ public class MetadataTools
         {
             return MavenMetadataReader.read( metadataFile );
         }
-        catch ( XMLException e )
+        catch (XMLException | IOException e )
         {
             // TODO: [monitor] consider a monitor for this event.
             // TODO: consider a read-redo on monitor return code?
-            log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
+            log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
             return null;
         }
     }
@@ -394,9 +395,9 @@ public class MetadataTools
                                                         String logicalResource, String proxyId )
     {
         String metadataPath = getRepositorySpecificName( proxyId, logicalResource );
-        Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
+        StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
 
-        if ( !Files.exists(metadataFile) || !Files.isRegularFile( metadataFile))
+        if ( !metadataFile.exists() || metadataFile.isContainer())
         {
             // Nothing to do. return null.
             return null;
@@ -406,11 +407,11 @@ public class MetadataTools
         {
             return MavenMetadataReader.read( metadataFile );
         }
-        catch ( XMLException e )
+        catch (XMLException | IOException e )
         {
             // TODO: [monitor] consider a monitor for this event.
             // TODO: consider a read-redo on monitor return code?
-            log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
+            log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
             return null;
         }
     }
@@ -419,9 +420,9 @@ public class MetadataTools
                                                         VersionedReference reference, String proxyId )
     {
         String metadataPath = getRepositorySpecificName( proxyId, toPath( reference ) );
-        Path metadataFile = Paths.get( managedRepository.getRepoRoot(), metadataPath );
+        StorageAsset metadataFile = managedRepository.getRepository().getAsset( metadataPath );
 
-        if ( !Files.exists(metadataFile) || !Files.isRegularFile(metadataFile))
+        if ( !metadataFile.exists() || metadataFile.isContainer())
         {
             // Nothing to do. return null.
             return null;
@@ -431,11 +432,11 @@ public class MetadataTools
         {
             return MavenMetadataReader.read( metadataFile );
         }
-        catch ( XMLException e )
+        catch (XMLException | IOException e )
         {
             // TODO: [monitor] consider a monitor for this event.
             // TODO: consider a read-redo on monitor return code?
-            log.warn( "Unable to read metadata: {}", metadataFile.toAbsolutePath(), e );
+            log.warn( "Unable to read metadata: {}", metadataFile.getPath(), e );
             return null;
         }
     }
@@ -443,7 +444,7 @@ public class MetadataTools
     public void updateMetadata( ManagedRepositoryContent managedRepository, String logicalResource )
         throws RepositoryMetadataException
     {
-        final Path metadataFile = Paths.get( managedRepository.getRepoRoot(), logicalResource );
+        final StorageAsset metadataFile = managedRepository.getRepository().getAsset( logicalResource );
         ArchivaRepositoryMetadata metadata = null;
 
         //Gather and merge all metadata available
@@ -480,7 +481,7 @@ public class MetadataTools
 
         RepositoryMetadataWriter.write( metadata, metadataFile );
 
-        ChecksummedFile checksum = new ChecksummedFile( metadataFile );
+        ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
         checksum.fixChecksums( algorithms );
     }
 
@@ -491,30 +492,17 @@ public class MetadataTools
      * @param metadataParentDirectory
      * @return origional set plus newly found versions
      */
-    private Set<String> findPossibleVersions( Set<String> versions, Path metadataParentDirectory )
+    private Set<String> findPossibleVersions( Set<String> versions, StorageAsset metadataParentDirectory )
     {
 
         Set<String> result = new HashSet<String>( versions );
 
-        try (Stream<Path> stream = Files.list( metadataParentDirectory )) {
-            stream.filter( Files::isDirectory ).filter(
-                p ->
-                {
-                    try(Stream<Path> substream = Files.list(p))
-                    {
-                        return substream.anyMatch( f -> Files.isRegularFile( f ) && f.toString().endsWith( ".pom" ));
-                    }
-                    catch ( IOException e )
-                    {
-                        return false;
-                    }
+        metadataParentDirectory.list().stream().filter(asset ->
+                asset.isContainer()).filter(asset -> {
+                    return asset.list().stream().anyMatch(f -> !f.isContainer() && f.getName().endsWith(".pom"));
                 }
-            ).forEach(
-                p -> result.add(p.getFileName().toString())
-            );
-        } catch (IOException e) {
-            //
-        }
+                ).forEach( p -> result.add(p.getName()));
+
         return result;
     }
 
@@ -522,8 +510,9 @@ public class MetadataTools
         ManagedRepositoryContent managedRepository, String logicalResource )
     {
         List<ArchivaRepositoryMetadata> metadatas = new ArrayList<>();
-        Path file = Paths.get( managedRepository.getRepoRoot(), logicalResource );
-        if ( Files.exists(file) )
+        StorageAsset file = managedRepository.getRepository().getAsset( logicalResource );
+
+        if ( file.exists() )
         {
             try
             {
@@ -533,10 +522,14 @@ public class MetadataTools
                     metadatas.add( existingMetadata );
                 }
             }
-            catch ( XMLException e )
+            catch (XMLException | IOException e )
             {
-                log.debug( "Could not read metadata at {}. Metadata will be removed.", file.toAbsolutePath() );
-                FileUtils.deleteQuietly( file );
+                log.debug( "Could not read metadata at {}. Metadata will be removed.", file.getPath() );
+                try {
+                    file.getStorage().removeAsset(file);
+                } catch (IOException ex) {
+                    log.error("Could not remove asset {}", file.getPath());
+                }
             }
         }
 
@@ -578,7 +571,8 @@ public class MetadataTools
     public void updateMetadata( ManagedRepositoryContent managedRepository, ProjectReference reference )
         throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
     {
-        Path metadataFile = Paths.get( managedRepository.getRepoRoot(), toPath( reference ) );
+
+        StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );
 
         long lastUpdated = getExistingLastUpdated( metadataFile );
 
@@ -593,7 +587,7 @@ public class MetadataTools
         // TODO: do we know this information instead?
 //        Set<Plugin> allPlugins = managedRepository.getPlugins( reference );
         Set<Plugin> allPlugins;
-        if ( Files.exists(metadataFile))
+        if ( metadataFile.exists())
         {
             try
             {
@@ -653,7 +647,7 @@ public class MetadataTools
 
         // Save the metadata model to disk.
         RepositoryMetadataWriter.write( metadata, metadataFile );
-        ChecksummedFile checksum = new ChecksummedFile( metadataFile );
+        ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
         checksum.fixChecksums( algorithms );
     }
 
@@ -748,9 +742,9 @@ public class MetadataTools
         }
     }
 
-    private long getExistingLastUpdated( Path metadataFile )
+    private long getExistingLastUpdated( StorageAsset metadataFile )
     {
-        if ( !Files.exists(metadataFile) )
+        if ( !metadataFile.exists() )
         {
             // Doesn't exist.
             return 0;
@@ -762,7 +756,7 @@ public class MetadataTools
 
             return getLastUpdated( metadata );
         }
-        catch ( XMLException e )
+        catch (XMLException | IOException e )
         {
             // Error.
             return 0;
@@ -788,7 +782,7 @@ public class MetadataTools
     public void updateMetadata( ManagedRepositoryContent managedRepository, VersionedReference reference )
         throws LayoutException, RepositoryMetadataException, IOException, ContentNotFoundException
     {
-        Path metadataFile = Paths.get( managedRepository.getRepoRoot(), toPath( reference ) );
+        StorageAsset metadataFile = managedRepository.getRepository().getAsset( toPath( reference ) );
 
         long lastUpdated = getExistingLastUpdated( metadataFile );
 
@@ -893,7 +887,7 @@ public class MetadataTools
 
         // Save the metadata model to disk.
         RepositoryMetadataWriter.write( metadata, metadataFile );
-        ChecksummedFile checksum = new ChecksummedFile( metadataFile );
+        ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
         checksum.fixChecksums( algorithms );
     }
 
index 4585d59ff793812c1d06d5651c577fb4c30c6bfd..6f6bf6608e26a9de91a8b795c4d61bca7d7ef840 100644 (file)
@@ -22,6 +22,7 @@ package org.apache.archiva.repository.metadata;
 import org.apache.archiva.common.utils.FileUtils;
 import org.apache.archiva.model.ArchivaRepositoryMetadata;
 import org.apache.archiva.model.Plugin;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.xml.XMLException;
 import org.apache.archiva.xml.XMLWriter;
 import org.apache.commons.collections4.CollectionUtils;
@@ -29,9 +30,12 @@ import org.apache.commons.lang.StringUtils;
 import org.dom4j.Document;
 import org.dom4j.DocumentHelper;
 import org.dom4j.Element;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.FileWriter;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.file.Path;
 import java.util.Collections;
@@ -44,11 +48,13 @@ import java.util.List;
  */
 public class RepositoryMetadataWriter
 {
-    public static void write( ArchivaRepositoryMetadata metadata, Path outputFile )
+    private static final Logger log = LoggerFactory.getLogger(RepositoryMetadataWriter.class);
+
+    public static void write( ArchivaRepositoryMetadata metadata, StorageAsset outputFile )
         throws RepositoryMetadataException
     {
         boolean thrown = false;
-        try (FileWriter writer = new FileWriter( outputFile.toFile() ))
+        try (OutputStreamWriter writer = new OutputStreamWriter( outputFile.getWriteStream(true)))
         {
             write( metadata, writer );
             writer.flush();
@@ -57,13 +63,17 @@ public class RepositoryMetadataWriter
         {
             thrown = true;
             throw new RepositoryMetadataException(
-                "Unable to write metadata file: " + outputFile.toAbsolutePath() + " - " + e.getMessage(), e );
+                "Unable to write metadata file: " + outputFile.getPath() + " - " + e.getMessage(), e );
         }
         finally
         {
             if ( thrown )
             {
-                FileUtils.deleteQuietly( outputFile );
+                try {
+                    outputFile.getStorage().removeAsset(outputFile);
+                } catch (IOException e) {
+                    log.error("Could not remove asset {}", outputFile);
+                }
             }
         }
     }
diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemAssetTest.java b/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemAssetTest.java
deleted file mode 100644 (file)
index 8e98e59..0000000
+++ /dev/null
@@ -1,202 +0,0 @@
-package org.apache.archiva.repository.content;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.io.IOUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.*;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.time.Instant;
-
-import static org.junit.Assert.*;
-
-public class FilesystemAssetTest {
-
-    Path assetPathFile;
-    Path assetPathDir;
-
-    @Before
-    public void init() throws IOException {
-        assetPathFile = Files.createTempFile("assetFile", "dat");
-        assetPathDir = Files.createTempDirectory("assetDir");
-    }
-
-    @After
-    public void cleanup() {
-
-        try {
-            Files.deleteIfExists(assetPathFile);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        try {
-            Files.deleteIfExists(assetPathDir);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-
-    @Test
-    public void getPath() {
-        FilesystemAsset asset = new FilesystemAsset("/"+assetPathFile.getFileName().toString(), assetPathFile);
-        assertEquals("/"+assetPathFile.getFileName().toString(), asset.getPath());
-    }
-
-    @Test
-    public void getName() {
-        FilesystemAsset asset = new FilesystemAsset("/"+assetPathFile.getFileName().toString(), assetPathFile);
-        assertEquals(assetPathFile.getFileName().toString(), asset.getName());
-
-    }
-
-    @Test
-    public void getModificationTime() throws IOException {
-        Instant modTime = Files.getLastModifiedTime(assetPathFile).toInstant();
-        FilesystemAsset asset = new FilesystemAsset("/test123", assetPathFile);
-        assertTrue(modTime.equals(asset.getModificationTime()));
-    }
-
-    @Test
-    public void isContainer() {
-        FilesystemAsset asset = new FilesystemAsset("/test1323", assetPathFile);
-        assertFalse(asset.isContainer());
-        FilesystemAsset asset2 = new FilesystemAsset("/test1234", assetPathDir);
-        assertTrue(asset2.isContainer());
-    }
-
-    @Test
-    public void list() throws IOException {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
-        assertEquals(0, asset.list().size());
-
-        FilesystemAsset asset2 = new FilesystemAsset("/test1235", assetPathDir);
-        assertEquals(0, asset2.list().size());
-        Path f1 = Files.createTempFile(assetPathDir, "testfile", "dat");
-        Path f2 = Files.createTempFile(assetPathDir, "testfile", "dat");
-        Path d1 = Files.createTempDirectory(assetPathDir, "testdir");
-        assertEquals(3, asset2.list().size());
-        assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f1.getFileName().toString())));
-        assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f2.getFileName().toString())));
-        assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(d1.getFileName().toString())));
-        Files.deleteIfExists(f1);
-        Files.deleteIfExists(f2);
-        Files.deleteIfExists(d1);
-
-
-    }
-
-    @Test
-    public void getSize() throws IOException {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
-        assertEquals(0, asset.getSize());
-
-        Files.write(assetPathFile, new String("abcdef").getBytes("ASCII"));
-        assertTrue(asset.getSize()>=6);
-
-
-    }
-
-    @Test
-    public void getData() throws IOException {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
-        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
-        try(InputStream is = asset.getReadStream()) {
-            assertEquals("abcdef", IOUtils.toString(is, "ASCII"));
-        }
-
-    }
-
-    @Test
-    public void getDataExceptionOnDir() throws IOException {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathDir);
-        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
-        try {
-            InputStream is = asset.getReadStream();
-            assertFalse("Exception expected for data on dir", true);
-        } catch (IOException e) {
-            // fine
-        }
-
-    }
-
-    @Test
-    public void writeData() throws IOException {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
-        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
-        try(OutputStream os  = asset.getWriteStream(true)) {
-            IOUtils.write("test12345", os, "ASCII");
-        }
-        assertEquals("test12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
-    }
-
-    @Test
-    public void writeDataAppend() throws IOException {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
-        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
-        try(OutputStream os  = asset.getWriteStream(false)) {
-            IOUtils.write("test12345", os, "ASCII");
-        }
-        assertEquals("abcdeftest12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
-    }
-
-    @Test
-    public void writeDataExceptionOnDir() throws IOException {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathDir);
-        try {
-
-            OutputStream os = asset.getWriteStream(true);
-            assertTrue("Writing to a directory should throw a IOException", false);
-        } catch (IOException e) {
-            // Fine
-        }
-    }
-
-    @Test
-    public void storeDataFile() throws IOException {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
-        Path dataFile = Files.createTempFile("testdata", "dat");
-        try(OutputStream os = Files.newOutputStream(dataFile)) {
-            IOUtils.write("testkdkdkd", os, "ASCII");
-        }
-        asset.replaceDataFromFile(dataFile);
-        assertEquals("testkdkdkd", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
-    }
-
-    @Test
-    public void exists() {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
-        assertTrue(asset.exists());
-        FilesystemAsset asset2 = new FilesystemAsset("/test1234", Paths.get("abcdefgkdkdk"));
-        assertFalse(asset2.exists());
-
-    }
-
-    @Test
-    public void getFilePath() {
-        FilesystemAsset asset = new FilesystemAsset("/test1234", assetPathFile);
-        assertEquals(assetPathFile, asset.getFilePath());
-    }
-}
\ No newline at end of file
diff --git a/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemStorageTest.java b/archiva-modules/archiva-base/archiva-repository-layer/src/test/java/org/apache/archiva/repository/content/FilesystemStorageTest.java
deleted file mode 100644 (file)
index 309c755..0000000
+++ /dev/null
@@ -1,208 +0,0 @@
-package org.apache.archiva.repository.content;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *  http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.filelock.DefaultFileLockManager;
-import org.apache.commons.io.IOUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-
-import static org.junit.Assert.*;
-
-public class FilesystemStorageTest {
-
-    private FilesystemStorage fsStorage;
-    private FilesystemAsset file1Asset;
-    private FilesystemAsset dir1Asset;
-    private Path baseDir;
-    private Path file1;
-    private Path dir1;
-
-    @Before
-    public void init() throws IOException {
-        baseDir = Files.createTempDirectory("FsStorageTest");
-        DefaultFileLockManager fl = new DefaultFileLockManager();
-        fsStorage = new FilesystemStorage(baseDir,fl);
-        Files.createDirectories(baseDir.resolve("dir1"));
-        Files.createDirectories(baseDir.resolve("dir2"));
-        file1 = Files.createFile(baseDir.resolve("dir1/testfile1.dat"));
-        dir1 = Files.createDirectories(baseDir.resolve("dir1/testdir"));
-        file1Asset = new FilesystemAsset("/dir1/testfile1.dat", file1);
-        dir1Asset = new FilesystemAsset("/dir1/testdir", dir1);
-    }
-
-    private class StringResult {
-        public String getData() {
-            return data;
-        }
-
-        public void setData(String data) {
-            this.data = data;
-        }
-
-        String data;
-    }
-
-
-    @After
-    public void cleanup() {
-        try {
-            Files.deleteIfExists(file1);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        try {
-            Files.deleteIfExists(dir1);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        try {
-            Files.deleteIfExists(baseDir.resolve("dir1"));
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        try {
-            Files.deleteIfExists(baseDir.resolve("dir2"));
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        try {
-            Files.deleteIfExists(baseDir);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-
-
-
-    @Test
-    public void consumeData() throws IOException {
-        try(OutputStream os = Files.newOutputStream(file1)) {
-            IOUtils.write("abcdefghijkl", os, "ASCII");
-        }
-        StringResult result = new StringResult();
-        fsStorage.consumeData(file1Asset, is -> consume(is, result), false );
-        assertEquals("abcdefghijkl" ,result.getData());
-    }
-
-    private void consume(InputStream is, StringResult result) {
-        try {
-            result.setData(IOUtils.toString(is, "ASCII"));
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    }
-
-
-    @Test
-    public void getAsset() {
-        StorageAsset asset = fsStorage.getAsset("/dir1/testfile1.dat");
-        assertEquals(file1, asset.getFilePath());
-    }
-
-    @Test
-    public void addAsset() {
-        StorageAsset newAsset = fsStorage.addAsset("dir2/test", false);
-        assertNotNull(newAsset);
-        assertFalse(newAsset.isContainer());
-        assertFalse(newAsset.exists());
-
-        StorageAsset newDirAsset = fsStorage.addAsset("/dir2/testdir2", true);
-        assertNotNull(newDirAsset);
-        assertTrue(newDirAsset.isContainer());
-        assertFalse(newDirAsset.exists());
-    }
-
-    @Test
-    public void removeAsset() throws IOException {
-        assertTrue(Files.exists(file1));
-        fsStorage.removeAsset(file1Asset);
-        assertFalse(Files.exists(file1));
-
-        assertTrue(Files.exists(dir1));
-        fsStorage.removeAsset(dir1Asset);
-        assertFalse(Files.exists(dir1));
-    }
-
-    @Test
-    public void moveAsset() throws IOException {
-        Path newFile=null;
-        Path newDir=null;
-        try {
-            assertTrue(Files.exists(file1));
-            try (OutputStream os = Files.newOutputStream(file1)) {
-                IOUtils.write("testakdkkdkdkdk", os, "ASCII");
-            }
-            long fileSize = Files.size(file1);
-            fsStorage.moveAsset(file1Asset, "/dir2/testfile2.dat");
-            assertFalse(Files.exists(file1));
-            newFile = baseDir.resolve("dir2/testfile2.dat");
-            assertTrue(Files.exists(newFile));
-            assertEquals(fileSize, Files.size(newFile));
-
-
-            assertTrue(Files.exists(dir1));
-            newDir = baseDir.resolve("dir2/testdir2");
-            fsStorage.moveAsset(dir1Asset, "dir2/testdir2");
-            assertFalse(Files.exists(dir1));
-            assertTrue(Files.exists(newDir));
-        } finally {
-            if (newFile!=null) Files.deleteIfExists(newFile);
-            if (newDir!=null) Files.deleteIfExists(newDir);
-        }
-    }
-
-    @Test
-    public void copyAsset() throws IOException {
-        Path newFile=null;
-        Path newDir=null;
-        try {
-            assertTrue(Files.exists(file1));
-            try (OutputStream os = Files.newOutputStream(file1)) {
-                IOUtils.write("testakdkkdkdkdk", os, "ASCII");
-            }
-            long fileSize = Files.size(file1);
-            fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat");
-            assertTrue(Files.exists(file1));
-            assertEquals(fileSize, Files.size(file1));
-            newFile = baseDir.resolve("dir2/testfile2.dat");
-            assertTrue(Files.exists(newFile));
-            assertEquals(fileSize, Files.size(newFile));
-
-
-            assertTrue(Files.exists(dir1));
-            newDir = baseDir.resolve("dir2/testdir2");
-            fsStorage.copyAsset(dir1Asset, "dir2/testdir2");
-            assertTrue(Files.exists(dir1));
-            assertTrue(Files.exists(newDir));
-        } finally {
-            if (newFile!=null) Files.deleteIfExists(newFile);
-            if (newDir!=null) Files.deleteIfExists(newDir);
-        }
-    }
-}
\ No newline at end of file
index 328f58a5a86544e684e004f5cd7ba1b968361e1d..27b1e1f459defd47b2e833ce16f9bb8661216075 100644 (file)
@@ -28,7 +28,7 @@ import org.apache.archiva.repository.LayoutException;
 import org.apache.archiva.repository.ManagedRepository;
 import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.RepositoryException;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.springframework.stereotype.Service;
 
 import java.util.Set;
index b26bea9bdf293987da21fdd3a3f58115b40fcade..30abcb4372f8176dc2c0fa06e55775f6f9aa402c 100644 (file)
@@ -25,7 +25,7 @@ import org.apache.archiva.consumers.InvalidRepositoryContentConsumer;
 import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
 import org.apache.archiva.consumers.RepositoryContentConsumer;
 import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.collections4.CollectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
index 5f6f7e0d503322df942160f21f7ff034c7aba85d..dec4e8dc9d6b2e99f02d29362c39edd48dcaae64 100644 (file)
@@ -29,7 +29,7 @@ import org.apache.archiva.repository.BasicRemoteRepository;
 import org.apache.archiva.repository.EditableManagedRepository;
 import org.apache.archiva.repository.EditableRemoteRepository;
 import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.content.FilesystemStorage;
+import org.apache.archiva.repository.storage.FilesystemStorage;
 import org.apache.archiva.repository.scanner.mock.ManagedRepositoryContentMock;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
 import org.apache.commons.io.FileUtils;
index ade81214a292e52fbb07fbb3236655f10f99ae69..dadf98a35f2726ef8e2a6258d23daa0ad42063cd 100644 (file)
@@ -19,6 +19,7 @@ package org.apache.archiva.repository.scanner.mock;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.utils.VersionUtil;
 import org.apache.archiva.metadata.model.ArtifactMetadata;
 import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
@@ -27,9 +28,11 @@ import org.apache.archiva.model.ArtifactReference;
 import org.apache.archiva.model.ProjectReference;
 import org.apache.archiva.model.VersionedReference;
 import org.apache.archiva.repository.*;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 
+import java.io.IOException;
 import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.Map;
@@ -48,6 +51,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
 
 
     private ManagedRepository repository;
+    private FilesystemStorage fsStorage;
 
     public ManagedRepositoryContentMock(ManagedRepository repo) {
         this.repository = repo;
@@ -92,7 +96,18 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
     @Override
     public String getRepoRoot( )
     {
-        return Paths.get("", "target", "test-repository", "managed").toString();
+        return getRepoRootAsset().getFilePath().toString();
+    }
+
+    private StorageAsset getRepoRootAsset() {
+        if (fsStorage==null) {
+            try {
+                fsStorage = new FilesystemStorage(Paths.get("", "target", "test-repository", "managed"), new DefaultFileLockManager());
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
+        return fsStorage.getAsset("");
     }
 
     @Override
@@ -329,7 +344,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
     @Override
     public StorageAsset toFile( ArtifactReference reference )
     {
-        return Paths.get(getRepoRoot(), refs.get(reference));
+        return getRepoRootAsset().resolve(refs.get(reference));
     }
 
     @Override
diff --git a/archiva-modules/archiva-base/archiva-storage-api/pom.xml b/archiva-modules/archiva-base/archiva-storage-api/pom.xml
new file mode 100644 (file)
index 0000000..06c853d
--- /dev/null
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <parent>
+    <artifactId>archiva-base</artifactId>
+    <groupId>org.apache.archiva</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>archiva-storage-api</artifactId>
+
+  <name>Archiva Base :: Storage API</name>
+
+  <properties>
+    <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
+  </properties>
+
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <basedir>${basedir}</basedir>
+          </systemPropertyVariables>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
\ No newline at end of file
diff --git a/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/RepositoryStorage.java b/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/RepositoryStorage.java
new file mode 100644 (file)
index 0000000..68ad39b
--- /dev/null
@@ -0,0 +1,159 @@
+package org.apache.archiva.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
+import java.nio.file.CopyOption;
+import java.util.function.Consumer;
+
+/**
+ *
+ * This is the low level API to access artifacts in a repository. Each artifact is represented
+ * by one storage asset. Each asset can be accessed by a path that is independent on the underlying storage
+ * implementation. Paths always use '/' as path separator. The path is local to the repository and
+ * is unique for each asset.
+ * The storage API knows nothing about the repository layout or repository specific metadata.
+ * If you use this API you must either have knowledge about the specific repository layout or use the structure
+ * as it is, e.g. for browsing.
+ *
+ * The base implementation for the storage uses a directory structure on the local filesystem.
+ *
+ *
+ * It is up to the repository type specific implementation to decide whether this API provides access to all
+ * elements that are really stored, or just a selected view.
+ *
+ * Checking access is not part of this API.
+ */
+public interface RepositoryStorage {
+    /**
+     * Returns information about a specific storage asset.
+     * @param path
+     * @return
+     */
+    StorageAsset getAsset(String path);
+
+    /**
+     * Consumes the data and sets a lock for the file during the operation.
+     *
+     * @param asset The asset from which the data is consumed.
+     * @param consumerFunction The consumer that reads the data
+     * @param readLock If true, a read lock is acquired on the asset.
+     * @throws IOException
+     */
+    void consumeData(StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock) throws IOException;
+
+    /**
+     * Consumes the data and sets a lock for the file during the operation.
+     *
+     * @param asset The asset from which the data is consumed.
+     * @param consumerFunction The consumer that reads the data
+     * @param readLock If true, a read lock is acquired on the asset.
+     * @throws IOException
+     */
+    void consumeDataFromChannel( StorageAsset asset, Consumer<ReadableByteChannel> consumerFunction, boolean readLock) throws IOException;
+
+    /**
+     * Writes data to the asset using a write lock.
+     *
+     * @param asset The asset to which the data is written.
+     * @param consumerFunction The function that provides the data.
+     * @param writeLock If true, a write lock is acquired on the destination.
+     */
+    void writeData( StorageAsset asset, Consumer<OutputStream> consumerFunction, boolean writeLock) throws IOException;;
+
+    /**
+     * Writes data and sets a lock during the operation.
+     *
+     * @param asset The asset to which the data is written.
+     * @param consumerFunction The function that provides the data.
+     * @param writeLock If true, a write lock is acquired on the destination.
+     * @throws IOException
+     */
+    void writeDataToChannel( StorageAsset asset, Consumer<WritableByteChannel> consumerFunction, boolean writeLock) throws IOException;
+
+    /**
+     * Adds a new asset to the underlying storage.
+     * @param path The path to the asset.
+     * @param container True, if the asset should be a container, false, if it is a file.
+     * @return
+     */
+    StorageAsset addAsset(String path, boolean container);
+
+    /**
+     * Removes the given asset from the storage.
+     *
+     * @param asset
+     * @throws IOException
+     */
+    void removeAsset(StorageAsset asset) throws IOException;
+
+    /**
+     * Moves the asset to the given location and returns the asset object for the destination. Moves only assets that
+     * belong to the same storage instance. It will throw an IOException if the assets are from different storage
+     * instances.
+     *
+     * @param origin The original asset
+     * @param destination The destination path pointing to the new asset.
+     * @param copyOptions The copy options.
+     * @return The asset representation of the moved object.
+     */
+    StorageAsset moveAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException;
+
+    /**
+     * Moves the asset to the given location and returns the asset object for the destination. Moves only assets that
+     * belong to the same storage instance. It will throw an IOException if the assets are from different storage
+     * instances.
+     *
+     * @param origin The original asset
+     * @param destination The destination path.
+     * @param copyOptions The copy options (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING}
+     * @throws IOException If it was not possible to copy the asset.
+     */
+    void moveAsset(StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException;
+
+    /**
+     * Copies the given asset to the new destination. Copies only assets that belong to the same storage instance.
+     * It will throw an IOException if the assets are from different storage instances.
+     *
+     * @param origin The original asset
+     * @param destination The path to the new asset
+     * @param copyOptions The copy options, e.g. (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING}
+     * @return The asset representation of the copied object
+     * @throws IOException If it was not possible to copy the asset
+     */
+    StorageAsset copyAsset(StorageAsset origin, String destination, CopyOption... copyOptions) throws IOException;
+
+    /**
+     * Copies the given asset to the new destination. Copies only assets that belong to the same storage instance.
+     * It will throw an IOException if the assets are from different storage instances.
+     *
+     * @param origin The original asset
+     * @param destination The path to the new asset
+     * @param copyOptions The copy options, e.g. (e.g. {@link java.nio.file.StandardCopyOption#REPLACE_EXISTING}
+     * @throws IOException If it was not possible to copy the asset
+     */
+    void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions) throws IOException;
+
+
+}
diff --git a/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/StorageAsset.java b/archiva-modules/archiva-base/archiva-storage-api/src/main/java/org/apache/archiva/repository/storage/StorageAsset.java
new file mode 100644 (file)
index 0000000..5e6b529
--- /dev/null
@@ -0,0 +1,186 @@
+package org.apache.archiva.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
+import java.nio.file.Path;
+import java.time.Instant;
+import java.util.List;
+
+/**
+ * An instance of this interface represents information about a specific asset in a repository.
+ * The asset may be a real artifact, a directory, or a virtual asset.
+ *
+ * Each asset has a unique path relative to the repository.
+ *
+ * The implementation may read the data directly from the filesystem or underlying storage implementation.
+ *
+ * @author Martin Stockhammer <martin_s@apache.org>
+ */
+public interface StorageAsset
+{
+
+    /**
+     * Returns the storage this asset belongs to.
+     * @return The storage instance that manages this asset.
+     */
+    RepositoryStorage getStorage();
+
+    /**
+     * Returns the complete path relative to the repository to the given asset.
+     *
+     * @return A path starting with '/' that uniquely identifies the asset in the repository.
+     */
+    String getPath();
+
+    /**
+     * Returns the name of the asset. It may be just the filename.
+     * @return The name of the asset, e.g. the file name.
+     */
+    String getName();
+
+    /**
+     * Returns the time of the last modification.
+     *
+     * @return The instant of the last modification.
+     */
+    Instant getModificationTime();
+
+    /**
+     * Returns true, if this asset is a container type and contains further child assets.
+     * @return True, if this asset may contain child assets, otherwise false.
+     */
+    boolean isContainer();
+
+    /**
+     * List the child assets.
+     *
+     * @return The list of children. If there are no children and if the asset is not a container, an empty list will be returned.
+     */
+    List<StorageAsset> list();
+
+    /**
+     * The size in bytes of the asset. If the asset does not have a size, -1 should be returned.
+     *
+     * @return The size if the asset has a size, otherwise -1
+     */
+    long getSize();
+
+    /**
+     * Returns the input stream of the artifact content.
+     * It will throw an IOException, if the stream could not be created.
+     * Implementations should create a new stream instance for each invocation and make sure that the
+     * stream is properly closed after usage.
+     *
+     * @return The InputStream representing the content of the artifact.
+     * @throws IOException If the stream could not be created.
+     */
+    InputStream getReadStream() throws IOException;
+
+    /**
+     * Returns a NIO representation of the data.
+     *
+     * @return A channel to the asset data.
+     * @throws IOException If the channel could not be opened.
+     */
+    ReadableByteChannel getReadChannel() throws IOException;
+
+    /**
+     *
+     * Returns an output stream where you can write data to the asset. The operation is not locked or synchronized.
+     * User of this method have to make sure, that the stream is properly closed after usage.
+     *
+     * @param replace If true, the original data will be replaced, otherwise the data will be appended.
+     * @return The OutputStream where the data can be written.
+     * @throws IOException If the stream could not be created.
+     */
+    OutputStream getWriteStream( boolean replace) throws IOException;
+
+    /**
+     * Returns a NIO representation of the asset where you can write the data.
+     *
+     * @param replace True, if the content should be replaced by the data written to the stream.
+     * @return The Channel for writing the data.
+     * @throws IOException If the channel could not be opened.
+     */
+    WritableByteChannel getWriteChannel( boolean replace) throws IOException;
+
+    /**
+     * Replaces the content. The implementation may do an atomic move operation, or keep a backup. If
+     * the operation fails, the implementation should try to restore the old data, if possible.
+     *
+     * The original file may be deleted, if the storage was successful.
+     *
+     * @param newData Replaces the data by the content of the given file.
+     * @return True, if the replacement was successful.
+     * @throws IOException If the data could not be written to the underlying storage.
+     */
+    boolean replaceDataFromFile( Path newData) throws IOException;
+
+    /**
+     * Returns true, if the asset exists.
+     *
+     * @return True, if the asset exists, otherwise false.
+     */
+    boolean exists();
+
+    /**
+     * Creates the asset in the underlying storage, if it does not exist.
+     * @throws IOException If the asset could not be created in the underlying storage.
+     */
+    void create() throws IOException;
+
+    /**
+     * Returns the real path to the asset, if it exist. Not all implementations may implement this method.
+     * The method throws {@link UnsupportedOperationException}, if and only if {@link #isFileBased()} returns false.
+     *
+     * @return The filesystem path to the asset.
+     * @throws UnsupportedOperationException If the underlying storage is not file based.
+     */
+    Path getFilePath() throws UnsupportedOperationException;
+
+    /**
+     * Returns true, if the asset can return a file path for the given asset. If this is true, the {@link #getFilePath()}
+     * will not throw a {@link UnsupportedOperationException}
+     *
+     * @return True, if a filesystem path is available via {@link #getFilePath()}, otherwise false.
+     */
+    boolean isFileBased();
+
+    /**
+     * Returns true, if there is a parent to this asset.
+     * @return True, if this asset has a parent asset, otherwise false.
+     */
+    boolean hasParent();
+
+    /**
+     * Returns the parent of this asset.
+     * @return The asset, or <code>null</code>, if it does not exist.
+     */
+    StorageAsset getParent();
+
+    /**
+     * Returns the asset relative to the given path
+     * @param toPath The path to resolve, relative to this asset.
+     * @return The asset representing the resolved path.
+     */
+    StorageAsset resolve(String toPath);
+}
diff --git a/archiva-modules/archiva-base/archiva-storage-fs/pom.xml b/archiva-modules/archiva-base/archiva-storage-fs/pom.xml
new file mode 100644 (file)
index 0000000..d128bed
--- /dev/null
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <parent>
+    <artifactId>archiva-base</artifactId>
+    <groupId>org.apache.archiva</groupId>
+    <version>3.0.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>archiva-storage-fs</artifactId>
+
+  <name>Archiva Base :: Storage Filesystem Based</name>
+
+  <properties>
+    <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
+  </properties>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-storage-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-filelock</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+    </dependency>
+
+
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <basedir>${basedir}</basedir>
+          </systemPropertyVariables>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
\ No newline at end of file
diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemAsset.java b/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemAsset.java
new file mode 100644 (file)
index 0000000..b343c7b
--- /dev/null
@@ -0,0 +1,494 @@
+package org.apache.archiva.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.channels.FileChannel;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
+import java.nio.file.*;
+import java.nio.file.attribute.*;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Implementation of an asset that is stored on the filesystem.
+ * <p>
+ * The implementation does not check the given paths. Caller should normalize the asset path
+ * and check, if the base path is a parent of the resulting path.
+ * <p>
+ * The file must not exist for all operations.
+ *
+ * @author Martin Stockhammer <martin_s@apache.org>
+ */
+public class FilesystemAsset implements StorageAsset {
+
+    private final static Logger log = LoggerFactory.getLogger(FilesystemAsset.class);
+
+    private final Path basePath;
+    private final Path assetPath;
+    private final String relativePath;
+
+    public static final String DEFAULT_POSIX_FILE_PERMS = "rw-rw----";
+    public static final String DEFAULT_POSIX_DIR_PERMS = "rwxrwx---";
+
+    public static final Set<PosixFilePermission> DEFAULT_POSIX_FILE_PERMISSIONS;
+    public static final Set<PosixFilePermission> DEFAULT_POSIX_DIR_PERMISSIONS;
+
+    public static final AclEntryPermission[] DEFAULT_ACL_FILE_PERMISSIONS = new AclEntryPermission[]{
+            AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL,
+            AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA
+    };
+
+    public static final AclEntryPermission[] DEFAULT_ACL_DIR_PERMISSIONS = new AclEntryPermission[]{
+            AclEntryPermission.ADD_FILE, AclEntryPermission.ADD_SUBDIRECTORY, AclEntryPermission.DELETE_CHILD,
+            AclEntryPermission.DELETE, AclEntryPermission.READ_ACL, AclEntryPermission.READ_ATTRIBUTES, AclEntryPermission.READ_DATA, AclEntryPermission.WRITE_ACL,
+            AclEntryPermission.WRITE_ATTRIBUTES, AclEntryPermission.WRITE_DATA, AclEntryPermission.APPEND_DATA
+    };
+
+    static {
+
+        DEFAULT_POSIX_FILE_PERMISSIONS = PosixFilePermissions.fromString(DEFAULT_POSIX_FILE_PERMS);
+        DEFAULT_POSIX_DIR_PERMISSIONS = PosixFilePermissions.fromString(DEFAULT_POSIX_DIR_PERMS);
+    }
+
+    Set<PosixFilePermission> defaultPosixFilePermissions = DEFAULT_POSIX_FILE_PERMISSIONS;
+    Set<PosixFilePermission> defaultPosixDirectoryPermissions = DEFAULT_POSIX_DIR_PERMISSIONS;
+
+    List<AclEntry> defaultFileAcls;
+    List<AclEntry> defaultDirectoryAcls;
+
+    boolean supportsAcl = false;
+    boolean supportsPosix = false;
+    final boolean setPermissionsForNew;
+    final RepositoryStorage storage;
+
+    boolean directoryHint = false;
+
+    private static final OpenOption[] REPLACE_OPTIONS = new OpenOption[]{StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE};
+    private static final OpenOption[] APPEND_OPTIONS = new OpenOption[]{StandardOpenOption.APPEND};
+
+
+    FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath) {
+        this.assetPath = assetPath;
+        this.relativePath = normalizePath(path);
+        this.setPermissionsForNew=false;
+        this.basePath = basePath;
+        this.storage = storage;
+        init();
+    }
+
+    /**
+     * Creates an asset for the given path. The given paths are not checked.
+     * The base path should be an absolute path.
+     *
+     * @param path The logical path for the asset relative to the repository.
+     * @param assetPath The asset path.
+     */
+    public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath) {
+        this.assetPath = assetPath;
+        this.relativePath = normalizePath(path);
+        this.setPermissionsForNew = false;
+        this.basePath = null;
+        this.storage = storage;
+        init();
+    }
+
+    /**
+     * Creates an asset for the given path. The given paths are not checked.
+     * The base path should be an absolute path.
+     *
+     * @param path The logical path for the asset relative to the repository
+     * @param assetPath The asset path.
+     * @param directory This is only relevant, if the represented file or directory does not exist yet and
+     *                  is a hint.
+     */
+    public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory) {
+        this.assetPath = assetPath;
+        this.relativePath = normalizePath(path);
+        this.directoryHint = directory;
+        this.setPermissionsForNew = false;
+        this.basePath = basePath;
+        this.storage = storage;
+        init();
+    }
+
+    /**
+     * Creates an asset for the given path. The given paths are not checked.
+     * The base path should be an absolute path.
+     *
+     * @param path The logical path for the asset relative to the repository
+     * @param assetPath The asset path.
+     * @param directory This is only relevant, if the represented file or directory does not exist yet and
+     *                  is a hint.
+     */
+    public FilesystemAsset(RepositoryStorage storage, String path, Path assetPath, Path basePath, boolean directory, boolean setPermissionsForNew) {
+        this.assetPath = assetPath;
+        this.relativePath = normalizePath(path);
+        this.directoryHint = directory;
+        this.setPermissionsForNew = setPermissionsForNew;
+        this.basePath = basePath;
+        this.storage = storage;
+        init();
+    }
+
+    private String normalizePath(String path) {
+        if (!path.startsWith("/")) {
+            return "/"+path;
+        } else {
+            return path;
+        }
+    }
+
+    private void init() {
+
+        if (setPermissionsForNew) {
+            try {
+                supportsAcl = Files.getFileStore(assetPath.getRoot()).supportsFileAttributeView(AclFileAttributeView.class);
+            } catch (IOException e) {
+                log.error("Could not check filesystem capabilities {}", e.getMessage());
+            }
+            try {
+                supportsPosix = Files.getFileStore(assetPath.getRoot()).supportsFileAttributeView(PosixFileAttributeView.class);
+            } catch (IOException e) {
+                log.error("Could not check filesystem capabilities {}", e.getMessage());
+            }
+
+            if (supportsAcl) {
+                AclFileAttributeView aclView = Files.getFileAttributeView(assetPath.getParent(), AclFileAttributeView.class);
+                UserPrincipal owner = null;
+                try {
+                    owner = aclView.getOwner();
+                    setDefaultFileAcls(processPermissions(owner, DEFAULT_ACL_FILE_PERMISSIONS));
+                    setDefaultDirectoryAcls(processPermissions(owner, DEFAULT_ACL_DIR_PERMISSIONS));
+
+                } catch (IOException e) {
+                    supportsAcl = false;
+                }
+
+
+            }
+        }
+    }
+
+    private List<AclEntry> processPermissions(UserPrincipal owner, AclEntryPermission[] defaultAclFilePermissions) {
+        AclEntry.Builder aclBuilder = AclEntry.newBuilder();
+        aclBuilder.setPermissions(defaultAclFilePermissions);
+        aclBuilder.setType(AclEntryType.ALLOW);
+        aclBuilder.setPrincipal(owner);
+        ArrayList<AclEntry> aclList = new ArrayList<>();
+        aclList.add(aclBuilder.build());
+        return aclList;
+    }
+
+
+    @Override
+    public RepositoryStorage getStorage( )
+    {
+        return storage;
+    }
+
+    @Override
+    public String getPath() {
+        return relativePath;
+    }
+
+    @Override
+    public String getName() {
+        return assetPath.getFileName().toString();
+    }
+
+    @Override
+    public Instant getModificationTime() {
+        try {
+            return Files.getLastModifiedTime(assetPath).toInstant();
+        } catch (IOException e) {
+            log.error("Could not read modification time of {}", assetPath);
+            return Instant.now();
+        }
+    }
+
+    /**
+     * Returns true, if the path of this asset points to a directory
+     *
+     * @return
+     */
+    @Override
+    public boolean isContainer() {
+        if (Files.exists(assetPath)) {
+            return Files.isDirectory(assetPath);
+        } else {
+            return directoryHint;
+        }
+    }
+
+    /**
+     * Returns the list of directory entries, if this asset represents a directory.
+     * Otherwise a empty list will be returned.
+     *
+     * @return The list of entries in the directory, if it exists.
+     */
+    @Override
+    public List<StorageAsset> list() {
+        try {
+            return Files.list(assetPath).map(p -> new FilesystemAsset(storage, relativePath + "/" + p.getFileName().toString(), assetPath.resolve(p)))
+                    .collect(Collectors.toList());
+        } catch (IOException e) {
+            return Collections.EMPTY_LIST;
+        }
+    }
+
+    /**
+     * Returns the size of the represented file. If it cannot be determined, -1 is returned.
+     *
+     * @return
+     */
+    @Override
+    public long getSize() {
+        try {
+            return Files.size(assetPath);
+        } catch (IOException e) {
+            return -1;
+        }
+    }
+
+    /**
+     * Returns a input stream to the underlying file, if it exists. The caller has to make sure, that
+     * the stream is closed after it was used.
+     *
+     * @return
+     * @throws IOException
+     */
+    @Override
+    public InputStream getReadStream() throws IOException {
+        if (isContainer()) {
+            throw new IOException("Can not create input stream for container");
+        }
+        return Files.newInputStream(assetPath);
+    }
+
+    @Override
+    public ReadableByteChannel getReadChannel( ) throws IOException
+    {
+        return FileChannel.open( assetPath, StandardOpenOption.READ );
+    }
+
+    private OpenOption[] getOpenOptions(boolean replace) {
+        return replace ? REPLACE_OPTIONS : APPEND_OPTIONS;
+    }
+
+    @Override
+    public OutputStream getWriteStream( boolean replace) throws IOException {
+        OpenOption[] options = getOpenOptions( replace );
+        if (!Files.exists( assetPath )) {
+            create();
+        }
+        return Files.newOutputStream(assetPath, options);
+    }
+
+    @Override
+    public WritableByteChannel getWriteChannel( boolean replace ) throws IOException
+    {
+        OpenOption[] options = getOpenOptions( replace );
+        return FileChannel.open( assetPath, options );
+    }
+
+    @Override
+    public boolean replaceDataFromFile( Path newData) throws IOException {
+        final boolean createNew = !Files.exists(assetPath);
+        Path backup = null;
+        if (!createNew) {
+            backup = findBackupFile(assetPath);
+        }
+        try {
+            if (!createNew) {
+                Files.move(assetPath, backup);
+            }
+            Files.move(newData, assetPath, StandardCopyOption.REPLACE_EXISTING);
+            applyDefaultPermissions(assetPath);
+            return true;
+        } catch (IOException e) {
+            log.error("Could not overwrite file {}", assetPath);
+            // Revert if possible
+            if (backup != null && Files.exists(backup)) {
+                Files.move(backup, assetPath, StandardCopyOption.REPLACE_EXISTING);
+            }
+            throw e;
+        } finally {
+            if (backup != null) {
+                try {
+                    Files.deleteIfExists(backup);
+                } catch (IOException e) {
+                    log.error("Could not delete backup file {}", backup);
+                }
+            }
+        }
+
+    }
+
+    private void applyDefaultPermissions(Path filePath) {
+        try {
+            if (supportsPosix) {
+                Set<PosixFilePermission> perms;
+                if (Files.isDirectory(filePath)) {
+                    perms = defaultPosixFilePermissions;
+                } else {
+                    perms = defaultPosixDirectoryPermissions;
+                }
+                Files.setPosixFilePermissions(filePath, perms);
+            } else if (supportsAcl) {
+                List<AclEntry> perms;
+                if (Files.isDirectory(filePath)) {
+                    perms = getDefaultDirectoryAcls();
+                } else {
+                    perms = getDefaultFileAcls();
+                }
+                AclFileAttributeView aclAttr = Files.getFileAttributeView(filePath, AclFileAttributeView.class);
+                aclAttr.setAcl(perms);
+            }
+        } catch (IOException e) {
+            log.error("Could not set permissions for {}: {}", filePath, e.getMessage());
+        }
+    }
+
+    private Path findBackupFile(Path file) {
+        String ext = ".bak";
+        Path backupPath = file.getParent().resolve(file.getFileName().toString() + ext);
+        int idx = 0;
+        while (Files.exists(backupPath)) {
+            backupPath = file.getParent().resolve(file.getFileName().toString() + ext + idx++);
+        }
+        return backupPath;
+    }
+
+    @Override
+    public boolean exists() {
+        return Files.exists(assetPath);
+    }
+
+    @Override
+    public Path getFilePath() throws UnsupportedOperationException {
+        return assetPath;
+    }
+
+    @Override
+    public boolean isFileBased( )
+    {
+        return true;
+    }
+
+    @Override
+    public boolean hasParent( )
+    {
+        if (basePath!=null && assetPath.equals(basePath)) {
+                return false;
+        }
+        return assetPath.getParent()!=null;
+    }
+
+    @Override
+    public StorageAsset getParent( )
+    {
+        Path parentPath;
+        if (basePath!=null && assetPath.equals( basePath )) {
+            parentPath=null;
+        } else
+        {
+            parentPath = assetPath.getParent( );
+        }
+        String relativeParent = StringUtils.substringBeforeLast( relativePath,"/");
+        if (parentPath!=null) {
+            return new FilesystemAsset(storage, relativeParent, parentPath, basePath, true, setPermissionsForNew );
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public StorageAsset resolve(String toPath) {
+        return storage.getAsset(this.getPath()+"/"+toPath);
+    }
+
+
+    public void setDefaultFileAcls(List<AclEntry> acl) {
+        defaultFileAcls = acl;
+    }
+
+    public List<AclEntry> getDefaultFileAcls() {
+        return defaultFileAcls;
+    }
+
+    public void setDefaultPosixFilePermissions(Set<PosixFilePermission> perms) {
+        defaultPosixFilePermissions = perms;
+    }
+
+    public Set<PosixFilePermission> getDefaultPosixFilePermissions() {
+        return defaultPosixFilePermissions;
+    }
+
+    public void setDefaultDirectoryAcls(List<AclEntry> acl) {
+        defaultDirectoryAcls = acl;
+    }
+
+    public List<AclEntry> getDefaultDirectoryAcls() {
+        return defaultDirectoryAcls;
+    }
+
+    public void setDefaultPosixDirectoryPermissions(Set<PosixFilePermission> perms) {
+        defaultPosixDirectoryPermissions = perms;
+    }
+
+    public Set<PosixFilePermission> getDefaultPosixDirectoryPermissions() {
+        return defaultPosixDirectoryPermissions;
+    }
+
+    @Override
+    public void create() throws IOException {
+        if (!Files.exists(assetPath)) {
+            if (directoryHint) {
+                Files.createDirectories(assetPath);
+            } else {
+                if (!Files.exists( assetPath.getParent() )) {
+                    Files.createDirectories( assetPath.getParent( ) );
+                }
+                Files.createFile(assetPath);
+            }
+            if (setPermissionsForNew) {
+                applyDefaultPermissions(assetPath);
+            }
+        }
+    }
+
+    @Override
+    public String toString() {
+        return relativePath+":"+assetPath;
+    }
+
+}
diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemStorage.java b/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/FilesystemStorage.java
new file mode 100644 (file)
index 0000000..860a7e9
--- /dev/null
@@ -0,0 +1,388 @@
+package org.apache.archiva.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.FileLockException;
+import org.apache.archiva.common.filelock.FileLockManager;
+import org.apache.archiva.common.filelock.FileLockTimeoutException;
+import org.apache.archiva.common.filelock.Lock;
+import org.apache.commons.io.FileUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.channels.FileChannel;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
+import java.nio.file.CopyOption;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.nio.file.StandardOpenOption;
+import java.util.function.Consumer;
+
+/**
+ * Implementation of <code>{@link RepositoryStorage}</code> where data is stored in the filesystem.
+ *
+ * All files are relative to a given base path. Path values are separated by '/', '..' is allowed to navigate
+ * to a parent directory, but navigation out of the base path will lead to an exception.
+ */
+public class FilesystemStorage implements RepositoryStorage {
+
+    private static final Logger log = LoggerFactory.getLogger(FilesystemStorage.class);
+
+    private final Path basePath;
+    private final FileLockManager fileLockManager;
+
+    public FilesystemStorage(Path basePath, FileLockManager fileLockManager) throws IOException {
+        if (!Files.exists(basePath)) {
+            Files.createDirectories(basePath);
+        }
+        this.basePath = basePath.normalize().toRealPath();
+        this.fileLockManager = fileLockManager;
+    }
+
+    private Path normalize(final String path) {
+        String nPath = path;
+        while (nPath.startsWith("/")) {
+            nPath = nPath.substring(1);
+        }
+        return Paths.get(nPath);
+    }
+
+    private Path getAssetPath(String path) throws IOException {
+        Path assetPath = basePath.resolve(normalize(path)).normalize();
+        if (!assetPath.startsWith(basePath))
+        {
+            throw new IOException("Path navigation out of allowed scope: "+path);
+        }
+        return assetPath;
+    }
+
+    @Override
+    public void consumeData(StorageAsset asset, Consumer<InputStream> consumerFunction, boolean readLock ) throws IOException
+    {
+        final Path path = asset.getFilePath();
+        try {
+            if (readLock) {
+                consumeDataLocked( path, consumerFunction );
+            } else
+            {
+                try ( InputStream is = Files.newInputStream( path ) )
+                {
+                    consumerFunction.accept( is );
+                }
+                catch ( IOException e )
+                {
+                    log.error("Could not read the input stream from file {}", path);
+                    throw e;
+                }
+            }
+        } catch (RuntimeException e)
+        {
+            log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
+            throw new IOException( e );
+        }
+
+    }
+
+    @Override
+    public void consumeDataFromChannel( StorageAsset asset, Consumer<ReadableByteChannel> consumerFunction, boolean readLock ) throws IOException
+    {
+        final Path path = asset.getFilePath();
+        try {
+            if (readLock) {
+                consumeDataFromChannelLocked( path, consumerFunction );
+            } else
+            {
+                try ( FileChannel is = FileChannel.open( path, StandardOpenOption.READ ) )
+                {
+                    consumerFunction.accept( is );
+                }
+                catch ( IOException e )
+                {
+                    log.error("Could not read the input stream from file {}", path);
+                    throw e;
+                }
+            }
+        } catch (RuntimeException e)
+        {
+            log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
+            throw new IOException( e );
+        }
+    }
+
+    @Override
+    public void writeData( StorageAsset asset, Consumer<OutputStream> consumerFunction, boolean writeLock ) throws IOException
+    {
+        final Path path = asset.getFilePath();
+        try {
+            if (writeLock) {
+                writeDataLocked( path, consumerFunction );
+            } else
+            {
+                try ( OutputStream is = Files.newOutputStream( path ) )
+                {
+                    consumerFunction.accept( is );
+                }
+                catch ( IOException e )
+                {
+                    log.error("Could not write the output stream to file {}", path);
+                    throw e;
+                }
+            }
+        } catch (RuntimeException e)
+        {
+            log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
+            throw new IOException( e );
+        }
+
+    }
+
+    @Override
+    public void writeDataToChannel( StorageAsset asset, Consumer<WritableByteChannel> consumerFunction, boolean writeLock ) throws IOException
+    {
+        final Path path = asset.getFilePath();
+        try {
+            if (writeLock) {
+                writeDataToChannelLocked( path, consumerFunction );
+            } else
+            {
+                try ( FileChannel os = FileChannel.open( path, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE ))
+                {
+                    consumerFunction.accept( os );
+                }
+                catch ( IOException e )
+                {
+                    log.error("Could not write the data to file {}", path);
+                    throw e;
+                }
+            }
+        } catch (RuntimeException e)
+        {
+            log.error( "Runtime exception during data consume from artifact {}. Error: {}", path, e.getMessage() );
+            throw new IOException( e );
+        }
+    }
+
+    /**
+     * Reads the file under a read lock from the file lock manager and hands the input
+     * stream to the consumer. The lock is always released after the consumer returns.
+     *
+     * @param file the file to read
+     * @param consumerFunction the function that consumes the input stream
+     * @throws IOException if reading fails or the lock cannot be acquired in time
+     */
+    private void consumeDataLocked( Path file, Consumer<InputStream> consumerFunction) throws IOException
+    {
+
+        final Lock lock;
+        try
+        {
+            lock = fileLockManager.readFileLock( file );
+            // NOTE(review): opens lock.getFile(), assumed to be the same path that was
+            // locked — confirm against the FileLockManager contract
+            try ( InputStream is = Files.newInputStream( lock.getFile()))
+            {
+                consumerFunction.accept( is );
+            }
+            catch ( IOException e )
+            {
+                log.error("Could not read the input stream from file {}", file);
+                throw e;
+            } finally
+            {
+                // Release the lock even if the consumer throws
+                fileLockManager.release( lock );
+            }
+        }
+        catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
+        {
+            // Lock acquisition (or release) failed: surface as IOException with cause
+            log.error("Locking error on file {}", file);
+            throw new IOException(e);
+        }
+    }
+
+    /**
+     * Opens a readable channel on the file under a read lock from the file lock manager
+     * and hands the channel to the consumer. The lock is always released afterwards.
+     *
+     * @param file the file to read
+     * @param consumerFunction the function that consumes the readable channel
+     * @throws IOException if reading fails or the lock cannot be acquired in time
+     */
+    private void consumeDataFromChannelLocked( Path file, Consumer<ReadableByteChannel> consumerFunction) throws IOException
+    {
+
+        final Lock lock;
+        try
+        {
+            lock = fileLockManager.readFileLock( file );
+            try ( FileChannel is = FileChannel.open( lock.getFile( ), StandardOpenOption.READ ))
+            {
+                consumerFunction.accept( is );
+            }
+            catch ( IOException e )
+            {
+                log.error("Could not read the input stream from file {}", file);
+                throw e;
+            } finally
+            {
+                // Release the lock even if the consumer throws
+                fileLockManager.release( lock );
+            }
+        }
+        catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
+        {
+            log.error("Locking error on file {}", file);
+            throw new IOException(e);
+        }
+    }
+
+
+    /**
+     * Opens an output stream on the file under a write lock from the file lock manager
+     * and hands the stream to the consumer. The lock is always released afterwards.
+     *
+     * @param file the file to write to
+     * @param consumerFunction the function that writes to the output stream
+     * @throws IOException if writing fails or the lock cannot be acquired in time
+     */
+    private void writeDataLocked( Path file, Consumer<OutputStream> consumerFunction) throws IOException
+    {
+
+        final Lock lock;
+        try
+        {
+            lock = fileLockManager.writeFileLock( file );
+            // NOTE(review): variable is named 'is' but holds an OutputStream — consider renaming
+            try ( OutputStream is = Files.newOutputStream( lock.getFile()))
+            {
+                consumerFunction.accept( is );
+            }
+            catch ( IOException e )
+            {
+                log.error("Could not write the output stream to file {}", file);
+                throw e;
+            } finally
+            {
+                // Release the lock even if the consumer throws
+                fileLockManager.release( lock );
+            }
+        }
+        catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
+        {
+            log.error("Locking error on file {}", file);
+            throw new IOException(e);
+        }
+    }
+
+    /**
+     * Opens a writable channel on the file (truncating or creating it) under a write lock
+     * from the file lock manager and hands the channel to the consumer. The lock is always
+     * released afterwards.
+     *
+     * @param file the file to write to
+     * @param consumerFunction the function that writes to the channel
+     * @throws IOException if writing fails or the lock cannot be acquired in time
+     */
+    private void writeDataToChannelLocked( Path file, Consumer<WritableByteChannel> consumerFunction) throws IOException
+    {
+
+        final Lock lock;
+        try
+        {
+            lock = fileLockManager.writeFileLock( file );
+            // NOTE(review): variable is named 'is' but holds a write channel — consider renaming
+            try ( FileChannel is = FileChannel.open( lock.getFile( ), StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE ))
+            {
+                consumerFunction.accept( is );
+            }
+            catch ( IOException e )
+            {
+                log.error("Could not write to file {}", file);
+                throw e;
+            } finally
+            {
+                // Release the lock even if the consumer throws
+                fileLockManager.release( lock );
+            }
+        }
+        catch ( FileLockException | FileNotFoundException | FileLockTimeoutException e)
+        {
+            log.error("Locking error on file {}", file);
+            throw new IOException(e);
+        }
+    }
+
+    @Override
+    public StorageAsset getAsset( String path )
+    {
+        try {
+            return new FilesystemAsset(this, path, getAssetPath(path));
+        } catch (IOException e) {
+            throw new IllegalArgumentException("Path navigates outside of base directory "+path);
+        }
+    }
+
+    @Override
+    public StorageAsset addAsset( String path, boolean container )
+    {
+        try {
+            return new FilesystemAsset(this, path, getAssetPath(path), basePath, container);
+        } catch (IOException e) {
+            throw new IllegalArgumentException("Path navigates outside of base directory "+path);
+        }
+    }
+
+    /**
+     * Removes the given asset by deleting its backing file. Deletion is not recursive:
+     * Files.delete fails on a non-empty directory.
+     *
+     * @param asset the asset to remove
+     * @throws IOException if the file does not exist or cannot be deleted
+     */
+    @Override
+    public void removeAsset( StorageAsset asset ) throws IOException
+    {
+        Files.delete(asset.getFilePath());
+    }
+
+    @Override
+    public StorageAsset moveAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException
+    {
+        boolean container = origin.isContainer();
+        FilesystemAsset newAsset = new FilesystemAsset(this, destination, getAssetPath(destination), basePath, container );
+        moveAsset( origin, newAsset, copyOptions );
+        return newAsset;
+    }
+
+    @Override
+    public void moveAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
+    {
+        if (origin.getStorage()!=this) {
+            throw new IOException("The origin asset does not belong to this storage instance. Cannot copy between different storage instances.");
+        }
+        if (destination.getStorage()!=this) {
+            throw new IOException("The destination asset does not belong to this storage instance. Cannot copy between different storage instances.");
+        }
+        Files.move(origin.getFilePath(), destination.getFilePath(), copyOptions);
+    }
+
+    @Override
+    public StorageAsset copyAsset( StorageAsset origin, String destination, CopyOption... copyOptions ) throws IOException
+    {
+        boolean container = origin.isContainer();
+        FilesystemAsset newAsset = new FilesystemAsset(this, destination, getAssetPath(destination), basePath, container );
+        copyAsset( origin, newAsset, copyOptions );
+        return newAsset;
+    }
+
+    @Override
+    public void copyAsset( StorageAsset origin, StorageAsset destination, CopyOption... copyOptions ) throws IOException
+    {
+        if (origin.getStorage()!=this) {
+            throw new IOException("The origin asset does not belong to this storage instance. Cannot copy between different storage instances.");
+        }
+        if (destination.getStorage()!=this) {
+            throw new IOException("The destination asset does not belong to this storage instance. Cannot copy between different storage instances.");
+        }
+        Path destinationPath = destination.getFilePath();
+        boolean overwrite = false;
+        for (int i=0; i<copyOptions.length; i++) {
+            if (copyOptions[i].equals( StandardCopyOption.REPLACE_EXISTING )) {
+                overwrite=true;
+            }
+        }
+        if (Files.exists(destinationPath) && !overwrite) {
+            throw new IOException("Destination file exists already "+ destinationPath);
+        }
+        if (Files.isDirectory( origin.getFilePath() ))
+        {
+            FileUtils.copyDirectory(origin.getFilePath( ).toFile(), destinationPath.toFile() );
+        } else if (Files.isRegularFile( origin.getFilePath() )) {
+            if (!Files.exists( destinationPath )) {
+                Files.createDirectories( destinationPath );
+            }
+            Files.copy( origin.getFilePath( ), destinationPath, copyOptions );
+        }
+    }
+
+    /**
+     * Returns the file lock manager used by this storage instance.
+     *
+     * @return the file lock manager
+     */
+    public FileLockManager getFileLockManager() {
+        return fileLockManager;
+    }
+
+}
diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/StorageUtil.java b/archiva-modules/archiva-base/archiva-storage-fs/src/main/java/org/apache/archiva/repository/storage/StorageUtil.java
new file mode 100644 (file)
index 0000000..a900680
--- /dev/null
@@ -0,0 +1,346 @@
+package org.apache.archiva.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.FileLockException;
+import org.apache.archiva.common.filelock.FileLockManager;
+import org.apache.archiva.common.filelock.FileLockTimeoutException;
+import org.apache.archiva.common.filelock.Lock;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
+import java.nio.file.*;
+import java.util.HashSet;
+import java.util.function.Consumer;
+
+/**
+ *
+ * Utility class for assets. Allows to copy, move between different storage instances and
+ * recursively consume the tree.
+ *
+ * @author Martin Stockhammer <martin_s@apache.org>
+ */
+public class StorageUtil
+{
+    private static final int DEFAULT_BUFFER_SIZE = 4096;
+    private static final Logger log = LoggerFactory.getLogger(StorageUtil.class);
+
+    /**
+     * Copies the source asset to the target. The assets may be from different RepositoryStorage instances.
+     * If you know that source and asset are from the same storage instance, the copy method of the storage
+     * instance may be faster.
+     *
+     * @param source The source asset
+     * @param target The target asset
+     * @param locked If true, a readlock is set on the source and a write lock is set on the target.
+     * @param copyOptions Copy options
+     * @throws IOException
+     */
+    public static final void copyAsset( final StorageAsset source,
+                                        final StorageAsset target,
+                                        boolean locked,
+                                        final CopyOption... copyOptions ) throws IOException
+    {
+        if (source.isFileBased() && target.isFileBased()) {
+            // Short cut for FS operations
+            final Path sourcePath = source.getFilePath();
+            final Path targetPath = target.getFilePath( );
+            if (locked) {
+                final FileLockManager lmSource = ((FilesystemStorage)source.getStorage()).getFileLockManager();
+                final FileLockManager lmTarget = ((FilesystemStorage)target.getStorage()).getFileLockManager();
+                try (Lock lockRead = lmSource.readFileLock( sourcePath ); Lock lockWrite = lmTarget.writeFileLock( targetPath ) )
+                {
+                    Files.copy( sourcePath, targetPath, copyOptions );
+                }
+                catch ( FileLockException e )
+                {
+                    throw new IOException( e );
+                }
+                catch ( FileLockTimeoutException e )
+                {
+                    throw new IOException( e );
+                }
+            } else
+            {
+                Files.copy( sourcePath, targetPath, copyOptions );
+            }
+        } else {
+            try {
+                final RepositoryStorage sourceStorage = source.getStorage();
+                final RepositoryStorage targetStorage = target.getStorage();
+                sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
+            }  catch (IOException e) {
+                throw e;
+            }  catch (Throwable e) {
+                Throwable cause = e.getCause();
+                if (cause instanceof IOException) {
+                    throw (IOException)cause;
+                } else
+                {
+                    throw new IOException( e );
+                }
+            }
+        }
+    }
+
+    /**
+     * Moves a asset between different storage instances.
+     * If you know that source and asset are from the same storage instance, the move method of the storage
+     * instance may be faster.
+     *
+     * @param source The source asset
+     * @param target The target asset
+     * @param locked If true, a lock is used for the move operation.
+     * @param copyOptions Options for copying
+     * @throws IOException If the move fails
+     */
+    public static final void moveAsset(StorageAsset source, StorageAsset target, boolean locked, CopyOption... copyOptions) throws IOException
+    {
+        if (source.isFileBased() && target.isFileBased()) {
+            // Short cut for FS operations
+            // Move is atomic operation
+            Files.move( source.getFilePath(), target.getFilePath(), copyOptions );
+        } else {
+            try {
+                final RepositoryStorage sourceStorage = source.getStorage();
+                final RepositoryStorage targetStorage = target.getStorage();
+                sourceStorage.consumeDataFromChannel( source, is -> wrapWriteFunction( is, targetStorage, target, locked ), locked);
+                sourceStorage.removeAsset( source );
+            }  catch (IOException e) {
+                throw e;
+            }  catch (Throwable e) {
+                Throwable cause = e.getCause();
+                if (cause instanceof IOException) {
+                    throw (IOException)cause;
+                } else
+                {
+                    throw new IOException( e );
+                }
+            }
+        }
+
+    }
+
+    private static final void wrapWriteFunction(ReadableByteChannel is, RepositoryStorage targetStorage, StorageAsset target, boolean locked) {
+        try {
+            targetStorage.writeDataToChannel( target, os -> copy(is, os), locked );
+        } catch (Exception e) {
+            throw new RuntimeException( e );
+        }
+    }
+
+
+    private static final void copy( final ReadableByteChannel is, final WritableByteChannel os ) {
+        if (is instanceof FileChannel) {
+            copy( (FileChannel) is, os );
+        } else if (os instanceof FileChannel) {
+            copy(is, (FileChannel)os);
+        } else
+        {
+            try
+            {
+                ByteBuffer buffer = ByteBuffer.allocate( DEFAULT_BUFFER_SIZE );
+                while ( is.read( buffer ) != -1 )
+                {
+                    buffer.flip( );
+                    while ( buffer.hasRemaining( ) )
+                    {
+                        os.write( buffer );
+                    }
+                    buffer.clear( );
+                }
+            }
+            catch ( IOException e )
+            {
+                throw new RuntimeException( e );
+            }
+        }
+    }
+
+    private static final void copy( final FileChannel is, final WritableByteChannel os ) {
+        try
+        {
+            is.transferTo( 0, is.size( ), os );
+        }
+        catch ( IOException e )
+        {
+            throw new RuntimeException( e );
+        }
+    }
+
+    private static final void copy( final ReadableByteChannel is, final FileChannel os ) {
+        try
+        {
+            os.transferFrom( is, 0, Long.MAX_VALUE );
+        }
+        catch ( IOException e )
+        {
+            throw new RuntimeException( e );
+        }
+    }
+
+    /**
+     * Runs the consumer function recursively on each asset found starting at the base path
+     * @param baseAsset The base path where to start search
+     * @param consumer The consumer function applied to each found asset
+     * @param depthFirst If true, the deepest elements are consumed first.
+     * @param maxDepth The maximum depth to recurse into. 0 means, only the baseAsset is consumed, 1 the base asset and its children and so forth.
+     */
+    public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst, final int maxDepth) throws IOException {
+        recurse(baseAsset, consumer, depthFirst, maxDepth, 0);
+    }
+
+    /**
+     * Runs the consumer function recursively on each asset found starting at the base path. The function descends into
+     * maximum depth.
+     *
+     * @param baseAsset The base path where to start search
+     * @param consumer The consumer function applied to each found asset
+     * @param depthFirst If true, the deepest elements are consumed first.
+     */
+    public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst) throws IOException {
+        recurse(baseAsset, consumer, depthFirst, Integer.MAX_VALUE, 0);
+    }
+
+    /**
+     * Runs the consumer function recursively on each asset found starting at the base path. It does not recurse with
+     * depth first and stops only if there are no more children available.
+     *
+     * @param baseAsset The base path where to start search
+     * @param consumer The consumer function applied to each found asset
+     */
+    public static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer) throws IOException {
+        recurse(baseAsset, consumer, false, Integer.MAX_VALUE, 0);
+    }
+
+    private static final void recurse(final StorageAsset baseAsset, final Consumer<StorageAsset> consumer, final boolean depthFirst, final int maxDepth, final int currentDepth)
+    throws IOException {
+        if (!depthFirst) {
+            consumer.accept(baseAsset);
+        }
+        if (currentDepth<maxDepth && baseAsset.isContainer()) {
+            for(StorageAsset asset : baseAsset.list() ) {
+                recurse(asset, consumer, depthFirst, maxDepth, currentDepth+1);
+            }
+        }
+        if (depthFirst) {
+            consumer.accept(baseAsset);
+        }
+    }
+
+    /**
+     * Deletes the given asset and all child assets recursively.
+     * @param baseDir The base asset to remove.
+     * @throws IOException
+     */
+    public static final void deleteRecursively(StorageAsset baseDir) throws IOException {
+        recurse(baseDir, a -> {
+            try {
+                a.getStorage().removeAsset(a);
+            } catch (IOException e) {
+                log.error("Could not delete asset {}", a.getPath());
+            }
+        },true);
+    }
+
+    /**
+     * Returns the extension of the name of a given asset. Extension is the substring after the last occurence of '.' in the
+     * string. If no '.' is found, the empty string is returned.
+     *
+     * @param asset The asset from which to return the extension string.
+     * @return The extension.
+     */
+    public static final String getExtension(StorageAsset asset) {
+        return StringUtils.substringAfterLast(asset.getName(),".");
+    }
+
+    public static final void copyToLocalFile(StorageAsset asset, Path destination, CopyOption... copyOptions) throws IOException {
+        if (asset.isFileBased()) {
+            Files.copy(asset.getFilePath(), destination, copyOptions);
+        } else {
+            try {
+
+                HashSet<OpenOption> openOptions = new HashSet<>();
+                for (CopyOption option : copyOptions) {
+                    if (option == StandardCopyOption.REPLACE_EXISTING) {
+                        openOptions.add(StandardOpenOption.CREATE);
+                        openOptions.add(StandardOpenOption.TRUNCATE_EXISTING);
+                        openOptions.add(StandardOpenOption.WRITE);
+                    } else {
+                        openOptions.add(StandardOpenOption.WRITE);
+                        openOptions.add(StandardOpenOption.CREATE_NEW);
+                    }
+                }
+                asset.getStorage().consumeDataFromChannel(asset, channel -> {
+                    try {
+                        FileChannel.open(destination, openOptions).transferFrom(channel, 0, Long.MAX_VALUE);
+                    } catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                }, false);
+            } catch (Throwable e) {
+                if (e.getCause() instanceof IOException) {
+                    throw (IOException)e.getCause();
+                } else {
+                    throw new IOException(e);
+                }
+            }
+        }
+    }
+
+    public static class PathInformation {
+        final Path path ;
+        final boolean tmpFile;
+
+        PathInformation(Path path, boolean tmpFile) {
+            this.path = path;
+            this.tmpFile = tmpFile;
+        }
+
+        public Path getPath() {
+            return path;
+        }
+
+        public boolean isTmpFile() {
+            return tmpFile;
+        }
+
+    }
+
+    public static final PathInformation getAssetDataAsPath(StorageAsset asset) throws IOException {
+        if (!asset.exists()) {
+            throw new IOException("Asset does not exist");
+        }
+        if (asset.isFileBased()) {
+            return new PathInformation(asset.getFilePath(), false);
+        } else {
+            Path tmpFile = Files.createTempFile(asset.getName(), getExtension(asset));
+            copyToLocalFile(asset, tmpFile, StandardCopyOption.REPLACE_EXISTING);
+            return new PathInformation(tmpFile, true);
+        }
+    }
+
+}
diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemAssetTest.java b/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemAssetTest.java
new file mode 100644 (file)
index 0000000..566c0cb
--- /dev/null
@@ -0,0 +1,203 @@
+package org.apache.archiva.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.Instant;
+
+/**
+ * Unit tests for FilesystemAsset, running against temporary files and directories.
+ */
+public class FilesystemAssetTest {
+
+    // One temp directory containing one temp file; recreated for every test
+    Path assetPathFile;
+    Path assetPathDir;
+    FilesystemStorage filesystemStorage;
+
+    @Before
+    public void init() throws IOException {
+        assetPathDir = Files.createTempDirectory("assetDir");
+        assetPathFile = Files.createTempFile(assetPathDir,"assetFile", "dat");
+        filesystemStorage = new FilesystemStorage(assetPathDir, new DefaultFileLockManager());
+    }
+
+    // Best-effort cleanup of the temp file and directory after each test
+    @After
+    public void cleanup() {
+
+        try {
+            Files.deleteIfExists(assetPathFile);
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        FileUtils.deleteQuietly(assetPathDir.toFile());
+    }
+
+
+    @Test
+    public void getPath() {
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, assetPathFile.getFileName().toString(), assetPathFile);
+        // The storage-relative path is normalized with a leading slash
+        Assert.assertEquals("/"+assetPathFile.getFileName().toString(), asset.getPath());
+    }
+
+    @Test
+    public void getName() {
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/"+assetPathFile.getFileName().toString(), assetPathFile);
+        Assert.assertEquals(assetPathFile.getFileName().toString(), asset.getName());
+
+    }
+
+    @Test
+    public void getModificationTime() throws IOException {
+        Instant modTime = Files.getLastModifiedTime(assetPathFile).toInstant();
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test123", assetPathFile);
+        Assert.assertTrue(modTime.equals(asset.getModificationTime()));
+    }
+
+    @Test
+    public void isContainer() {
+        // A file is not a container, a directory is
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1323", assetPathFile);
+        Assert.assertFalse(asset.isContainer());
+        FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
+        Assert.assertTrue(asset2.isContainer());
+    }
+
+    @Test
+    public void list() throws IOException {
+        // Listing a file yields an empty list
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
+        Assert.assertEquals(0, asset.list().size());
+
+        // The directory initially contains only the asset file created in init()
+        FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1235", assetPathDir);
+        Assert.assertEquals(1, asset2.list().size());
+        Path f1 = Files.createTempFile(assetPathDir, "testfile", "dat");
+        Path f2 = Files.createTempFile(assetPathDir, "testfile", "dat");
+        Path d1 = Files.createTempDirectory(assetPathDir, "testdir");
+        Assert.assertEquals(4, asset2.list().size());
+        Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f1.getFileName().toString())));
+        Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(f2.getFileName().toString())));
+        Assert.assertTrue(asset2.list().stream().anyMatch(p -> p.getName().equals(d1.getFileName().toString())));
+        Files.deleteIfExists(f1);
+        Files.deleteIfExists(f2);
+        Files.deleteIfExists(d1);
+
+
+    }
+
+    @Test
+    public void getSize() throws IOException {
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
+        Assert.assertEquals(0, asset.getSize());
+
+        Files.write(assetPathFile, new String("abcdef").getBytes("ASCII"));
+        Assert.assertTrue(asset.getSize()>=6);
+
+
+    }
+
+    @Test
+    public void getData() throws IOException {
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
+        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
+        try(InputStream is = asset.getReadStream()) {
+            Assert.assertEquals("abcdef", IOUtils.toString(is, "ASCII"));
+        }
+
+    }
+
+    @Test
+    public void getDataExceptionOnDir() throws IOException {
+        // Reading from a directory-backed asset must fail with an IOException
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
+        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
+        try {
+            InputStream is = asset.getReadStream();
+            Assert.assertFalse("Exception expected for data on dir", true);
+        } catch (IOException e) {
+            // fine
+        }
+
+    }
+
+    @Test
+    public void writeData() throws IOException {
+        // getWriteStream(true) replaces the existing content
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
+        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
+        try(OutputStream os  = asset.getWriteStream(true)) {
+            IOUtils.write("test12345", os, "ASCII");
+        }
+        Assert.assertEquals("test12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
+    }
+
+    @Test
+    public void writeDataAppend() throws IOException {
+        // getWriteStream(false) appends to the existing content
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
+        Files.write(assetPathFile, "abcdef".getBytes("ASCII"));
+        try(OutputStream os  = asset.getWriteStream(false)) {
+            IOUtils.write("test12345", os, "ASCII");
+        }
+        Assert.assertEquals("abcdeftest12345", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
+    }
+
+    @Test
+    public void writeDataExceptionOnDir() throws IOException {
+        // Writing to a directory-backed asset must fail with an IOException
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathDir);
+        try {
+
+            OutputStream os = asset.getWriteStream(true);
+            Assert.assertTrue("Writing to a directory should throw a IOException", false);
+        } catch (IOException e) {
+            // Fine
+        }
+    }
+
+    @Test
+    public void storeDataFile() throws IOException {
+        // replaceDataFromFile copies the content of the given file into the asset
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
+        Path dataFile = Files.createTempFile("testdata", "dat");
+        try(OutputStream os = Files.newOutputStream(dataFile)) {
+            IOUtils.write("testkdkdkd", os, "ASCII");
+        }
+        asset.replaceDataFromFile(dataFile);
+        Assert.assertEquals("testkdkdkd", IOUtils.toString(assetPathFile.toUri().toURL(), "ASCII"));
+    }
+
+    @Test
+    public void exists() {
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
+        Assert.assertTrue(asset.exists());
+        // An asset pointing to a non-existing path reports false
+        FilesystemAsset asset2 = new FilesystemAsset(filesystemStorage, "/test1234", Paths.get("abcdefgkdkdk"));
+        Assert.assertFalse(asset2.exists());
+
+    }
+
+    @Test
+    public void getFilePath() {
+        FilesystemAsset asset = new FilesystemAsset(filesystemStorage, "/test1234", assetPathFile);
+        Assert.assertEquals(assetPathFile, asset.getFilePath());
+    }
+}
\ No newline at end of file
diff --git a/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemStorageTest.java b/archiva-modules/archiva-base/archiva-storage-fs/src/test/java/org/apache/archiva/repository/storage/FilesystemStorageTest.java
new file mode 100644 (file)
index 0000000..ebbc6a5
--- /dev/null
@@ -0,0 +1,200 @@
+package org.apache.archiva.repository.storage;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+
+import static org.junit.Assert.*;
+
+public class FilesystemStorageTest {
+
+    private FilesystemStorage fsStorage;
+    private FilesystemAsset file1Asset;
+    private FilesystemAsset dir1Asset;
+    private Path baseDir;
+    private Path file1;
+    private Path dir1;
+
+    @Before
+    public void init() throws IOException {
+        baseDir = Files.createTempDirectory("FsStorageTest");
+        DefaultFileLockManager fl = new DefaultFileLockManager();
+        fsStorage = new FilesystemStorage(baseDir,fl);
+        Files.createDirectories(baseDir.resolve("dir1"));
+        Files.createDirectories(baseDir.resolve("dir2"));
+        file1 = Files.createFile(baseDir.resolve("dir1/testfile1.dat"));
+        dir1 = Files.createDirectories(baseDir.resolve("dir1/testdir"));
+        file1Asset = new FilesystemAsset(fsStorage, "/dir1/testfile1.dat", file1);
+        dir1Asset = new FilesystemAsset(fsStorage, "/dir1/testdir", dir1);
+    }
+
+    private class StringResult {
+        public String getData() {
+            return data;
+        }
+
+        public void setData(String data) {
+            this.data = data;
+        }
+
+        String data;
+    }
+
+
+    @After
+    public void cleanup() {
+        FileUtils.deleteQuietly(file1.toFile());
+        FileUtils.deleteQuietly(dir1.toFile());
+        FileUtils.deleteQuietly(baseDir.resolve("dir1").toFile());
+        FileUtils.deleteQuietly(baseDir.resolve("dir2").toFile());
+        FileUtils.deleteQuietly(baseDir.toFile());
+    }
+
+
+
+
+    @Test
+    public void consumeData() throws IOException {
+        try(OutputStream os = Files.newOutputStream(file1)) {
+            IOUtils.write("abcdefghijkl", os, "ASCII");
+        }
+        StringResult result = new StringResult();
+        fsStorage.consumeData(file1Asset, is -> consume(is, result), false );
+        Assert.assertEquals("abcdefghijkl" ,result.getData());
+    }
+
+    private void consume(InputStream is, StringResult result) {
+        try {
+            result.setData(IOUtils.toString(is, "ASCII"));
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+
+    @Test
+    public void getAsset() {
+        StorageAsset asset = fsStorage.getAsset("/dir1/testfile1.dat");
+        Assert.assertEquals(file1, asset.getFilePath());
+    }
+
+    @Test
+    public void addAsset() {
+        StorageAsset newAsset = fsStorage.addAsset("dir2/test", false);
+        Assert.assertNotNull(newAsset);
+        Assert.assertFalse(newAsset.isContainer());
+        Assert.assertFalse(newAsset.exists());
+
+        StorageAsset newDirAsset = fsStorage.addAsset("/dir2/testdir2", true);
+        Assert.assertNotNull(newDirAsset);
+        Assert.assertTrue(newDirAsset.isContainer());
+        Assert.assertFalse(newDirAsset.exists());
+    }
+
+    @Test
+    public void removeAsset() throws IOException {
+        Assert.assertTrue(Files.exists(file1));
+        fsStorage.removeAsset(file1Asset);
+        Assert.assertFalse(Files.exists(file1));
+
+        Assert.assertTrue(Files.exists(dir1));
+        fsStorage.removeAsset(dir1Asset);
+        Assert.assertFalse(Files.exists(dir1));
+    }
+
+    @Test
+    public void moveAsset() throws IOException {
+        Path newFile=null;
+        Path newDir=null;
+        try {
+            Assert.assertTrue(Files.exists(file1));
+            try (OutputStream os = Files.newOutputStream(file1)) {
+                IOUtils.write("testakdkkdkdkdk", os, "ASCII");
+            }
+            long fileSize = Files.size(file1);
+            fsStorage.moveAsset(file1Asset, "/dir2/testfile2.dat");
+            Assert.assertFalse(Files.exists(file1));
+            newFile = baseDir.resolve("dir2/testfile2.dat");
+            Assert.assertTrue(Files.exists(newFile));
+            Assert.assertEquals(fileSize, Files.size(newFile));
+
+
+            Assert.assertTrue(Files.exists(dir1));
+            newDir = baseDir.resolve("dir2/testdir2");
+            fsStorage.moveAsset(dir1Asset, "dir2/testdir2");
+            Assert.assertFalse(Files.exists(dir1));
+            Assert.assertTrue(Files.exists(newDir));
+        } finally {
+            if (newFile!=null) Files.deleteIfExists(newFile);
+            if (newDir!=null) Files.deleteIfExists(newDir);
+        }
+    }
+
+    @Test
+    public void copyAsset() throws IOException {
+        Path newFile=null;
+        Path newDir=null;
+        try {
+            Assert.assertTrue(Files.exists(file1));
+            try (OutputStream os = Files.newOutputStream(file1)) {
+                IOUtils.write("testakdkkdkdkdk", os, "ASCII");
+            }
+            long fileSize = Files.size(file1);
+            fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat", StandardCopyOption.REPLACE_EXISTING);
+            Assert.assertTrue(Files.exists(file1));
+            Assert.assertEquals(fileSize, Files.size(file1));
+            newFile = baseDir.resolve("dir2/testfile2.dat");
+            Assert.assertTrue(Files.exists(newFile));
+            Assert.assertEquals(fileSize, Files.size(newFile));
+
+            try {
+                fsStorage.copyAsset(file1Asset, "/dir2/testfile2.dat");
+                Assert.assertTrue("IOException should be thrown (File exists)", false);
+            } catch (IOException ex) {
+                Assert.assertTrue("Exception must contain 'file exists'", ex.getMessage().contains("file exists"));
+            }
+
+            Assert.assertTrue(Files.exists(dir1));
+            newDir = baseDir.resolve("dir2/testdir2");
+            fsStorage.copyAsset(dir1Asset, "dir2/testdir2");
+            Assert.assertTrue(Files.exists(dir1));
+            Assert.assertTrue(Files.exists(newDir));
+        } finally {
+            if (newFile!=null) Files.deleteIfExists(newFile);
+            if (newDir!=null) FileUtils.deleteQuietly(newDir.toFile());
+        }
+    }
+}
\ No newline at end of file
index a687a95642b56ae8664703a6cc639e9993bb7d1a..a580e84a8e32cab3e5093c53a232bf7fae322587 100644 (file)
@@ -52,5 +52,7 @@
     <module>archiva-repository-scanner</module>
     <module>archiva-repository-admin</module>
     <module>archiva-security-common</module>
+    <module>archiva-storage-api</module>
+    <module>archiva-storage-fs</module>
   </modules>
 </project>
index 25a3f9d648ca98d331a091085304db38800ebd42..93cc6ce816beceeecbfc8257f3a599e7dba1989a 100644 (file)
@@ -28,12 +28,11 @@ import org.apache.archiva.consumers.InvalidRepositoryContentConsumer;
 import org.apache.archiva.consumers.KnownRepositoryContentConsumer;
 import org.apache.archiva.converter.RepositoryConversionException;
 import org.apache.archiva.repository.BasicManagedRepository;
-import org.apache.archiva.repository.content.FilesystemStorage;
+import org.apache.archiva.repository.storage.FilesystemStorage;
 import org.apache.archiva.repository.content.maven2.ManagedDefaultRepositoryContent;
 import org.apache.archiva.repository.scanner.RepositoryScanner;
 import org.apache.archiva.repository.scanner.RepositoryScannerException;
 import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
 import org.apache.maven.artifact.repository.MavenArtifactRepository;
 import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
 import org.springframework.stereotype.Service;
index 47de4b72c45ebb4e3448701ce89704ccfeb6af17..7f18ad185023a36150b1278eb9632cfda059bc5e 100644 (file)
@@ -19,14 +19,21 @@ package org.apache.archiva.indexer.maven;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.common.filelock.FileLockManager;
 import org.apache.archiva.indexer.ArchivaIndexingContext;
 import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.maven.index.context.IndexingContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.net.URI;
 import java.nio.file.Files;
 import java.nio.file.NoSuchFileException;
+import java.nio.file.Path;
 import java.sql.Date;
 import java.time.ZonedDateTime;
 import java.util.Set;
@@ -36,8 +43,11 @@ import java.util.Set;
  */
 public class MavenIndexContext implements ArchivaIndexingContext {
 
+    private static final Logger log = LoggerFactory.getLogger(ArchivaIndexingContext.class);
+
     private IndexingContext delegate;
     private Repository repository;
+    private StorageAsset dir = null;
 
     protected MavenIndexContext(Repository repository, IndexingContext delegate) {
         this.delegate = delegate;
@@ -56,8 +66,23 @@ public class MavenIndexContext implements ArchivaIndexingContext {
     }
 
     @Override
-    public URI getPath() {
-        return delegate.getIndexDirectoryFile().toURI();
+    public StorageAsset getPath() {
+        if (dir==null) {
+            StorageAsset repositoryDirAsset = repository.getAsset("");
+            Path repositoryDir = repositoryDirAsset.getFilePath().toAbsolutePath();
+            Path indexDir = delegate.getIndexDirectoryFile().toPath();
+            if (indexDir.startsWith(repositoryDir)) {
+                dir = repository.getAsset(repositoryDir.relativize(indexDir).toString());
+            } else {
+                try {
+                    FilesystemStorage storage = new FilesystemStorage(indexDir, new DefaultFileLockManager());
+                    dir = storage.getAsset("");
+                } catch (IOException e) {
+                    log.error("Error occured while creating storage for index dir");
+                }
+            }
+        }
+        return dir;
     }
 
     @Override
index d049112061c2d9c5d4c5a1439db55f9e03e83f00..a621a5bc8a4761b51bbae5b0c568737af00e7df3 100644 (file)
@@ -19,7 +19,6 @@ package org.apache.archiva.indexer.maven;
  * under the License.
  */
 
-import org.apache.archiva.admin.model.RepositoryAdminException;
 import org.apache.archiva.common.utils.FileUtils;
 import org.apache.archiva.common.utils.PathUtil;
 import org.apache.archiva.configuration.ArchivaConfiguration;
@@ -28,8 +27,6 @@ import org.apache.archiva.indexer.ArchivaIndexingContext;
 import org.apache.archiva.indexer.IndexCreationFailedException;
 import org.apache.archiva.indexer.IndexUpdateFailedException;
 import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.indexer.merger.IndexMergerException;
-import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
 import org.apache.archiva.proxy.ProxyRegistry;
 import org.apache.archiva.proxy.maven.WagonFactory;
 import org.apache.archiva.proxy.maven.WagonFactoryException;
@@ -42,10 +39,12 @@ import org.apache.archiva.repository.RemoteRepository;
 import org.apache.archiva.repository.Repository;
 import org.apache.archiva.repository.RepositoryType;
 import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
-import org.apache.archiva.repository.content.FilesystemAsset;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.index.ArtifactContext;
 import org.apache.maven.index.ArtifactContextProducer;
@@ -142,7 +141,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
     private ProxyRegistry proxyRegistry;
 
 
-    private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
+    private ConcurrentSkipListSet<StorageAsset> activeContexts = new ConcurrentSkipListSet<>( );
 
     private static final int WAIT_TIME = 100;
     private static final int MAX_WAIT = 10;
@@ -158,9 +157,9 @@ public class MavenIndexManager implements ArchivaIndexManager {
         return context.getBaseContext( IndexingContext.class );
     }
 
-    private Path getIndexPath( ArchivaIndexingContext ctx )
+    private StorageAsset getIndexPath( ArchivaIndexingContext ctx )
     {
-        return PathUtil.getPathFromUri( ctx.getPath( ) );
+        return ctx.getPath( );
     }
 
     @FunctionalInterface
@@ -185,7 +184,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
         {
             throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
         }
-        final Path ctxPath = getIndexPath( context );
+        final StorageAsset ctxPath = getIndexPath( context );
         int loop = MAX_WAIT;
         boolean active = false;
         while ( loop-- > 0 && !active )
@@ -395,9 +394,9 @@ public class MavenIndexManager implements ArchivaIndexManager {
     @Override
     public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
     {
-        final URI ctxUri = context.getPath();
+        final StorageAsset ctxUri = context.getPath();
         executeUpdateFunction(context, indexingContext -> {
-            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
             try {
                 indexer.addArtifactsToIndex(artifacts, indexingContext);
             } catch (IOException e) {
@@ -411,9 +410,9 @@ public class MavenIndexManager implements ArchivaIndexManager {
     @Override
     public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
     {
-        final URI ctxUri = context.getPath();
+        final StorageAsset ctxUri = context.getPath();
         executeUpdateFunction(context, indexingContext -> {
-            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
             try {
                 indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
             } catch (IOException e) {
@@ -457,9 +456,8 @@ public class MavenIndexManager implements ArchivaIndexManager {
             throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
                 + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
         }
-        MavenIndexContext context = new MavenIndexContext( repository, mvnCtx );
 
-        return context;
+        return new MavenIndexContext( repository, mvnCtx );
     }
 
     @Override
@@ -472,7 +470,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
                 log.warn("Index close failed");
             }
             try {
-                FileUtils.deleteDirectory(Paths.get(context.getPath()));
+                StorageUtil.deleteRecursively(context.getPath());
             } catch (IOException e) {
                 throw new IndexUpdateFailedException("Could not delete index files");
             }
@@ -593,51 +591,57 @@ public class MavenIndexManager implements ArchivaIndexManager {
         }
     }
 
-    private StorageAsset getIndexPath(URI indexDir, Path repoDir, String defaultDir) throws IOException
+    private StorageAsset getIndexPath(URI indexDirUri, RepositoryStorage storage, String defaultDir) throws IOException
     {
-        String indexPath = indexDir.getPath();
-        Path indexDirectory = null;
-        if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
+        Path indexDirectory;
+        Path repositoryPath = storage.getAsset("").getFilePath().toAbsolutePath();
+        StorageAsset indexDir;
+        if ( ! StringUtils.isEmpty(indexDirUri.toString( ) ) )
         {
 
-            indexDirectory = PathUtil.getPathFromUri( indexDir );
+            indexDirectory = PathUtil.getPathFromUri( indexDirUri );
             // not absolute so create it in repository directory
-            if ( indexDirectory.isAbsolute( ) )
+            if ( indexDirectory.isAbsolute( ) && !indexDirectory.startsWith(repositoryPath))
             {
-                indexPath = indexDirectory.getFileName().toString();
+                if (storage instanceof FilesystemStorage) {
+                    FilesystemStorage fsStorage = (FilesystemStorage) storage;
+                    FilesystemStorage indexStorage = new FilesystemStorage(indexDirectory.getParent(), fsStorage.getFileLockManager());
+                    indexDir = indexStorage.getAsset(indexDirectory.getFileName().toString());
+                } else {
+                    throw new IOException("The given storage is not file based.");
+                }
+            } else if (indexDirectory.isAbsolute()) {
+                indexDir = storage.getAsset(repositoryPath.relativize(indexDirectory).toString());
             }
             else
             {
-                indexDirectory = repoDir.resolve( indexDirectory );
+                indexDir = storage.getAsset(indexDirectory.toString());
             }
         }
         else
         {
-            indexDirectory = repoDir.resolve( defaultDir );
-            indexPath = defaultDir;
+            indexDir = storage.getAsset( defaultDir );
         }
 
-        if ( !Files.exists( indexDirectory ) )
+        if ( !indexDir.exists() )
         {
-            Files.createDirectories( indexDirectory );
+            indexDir.create();
         }
-        return new FilesystemAsset( indexPath, indexDirectory);
+        return indexDir;
     }
 
     private StorageAsset getIndexPath( Repository repo) throws IOException {
         IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
-        return getIndexPath( icf.getIndexPath(), repo.getAsset( "" ).getFilePath(), DEFAULT_INDEX_PATH);
+        return getIndexPath( icf.getIndexPath(), repo, DEFAULT_INDEX_PATH);
     }
 
     private StorageAsset getPackedIndexPath(Repository repo) throws IOException {
         IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
-        return getIndexPath(icf.getPackedIndexPath(), repo.getAsset( "" ).getFilePath(), DEFAULT_PACKED_INDEX_PATH);
+        return getIndexPath(icf.getPackedIndexPath(), repo, DEFAULT_PACKED_INDEX_PATH);
     }
 
     private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
     {
-        Path appServerBase = archivaConfiguration.getAppServerBaseDir( );
-
         String contextKey = "remote-" + remoteRepository.getId( );
 
 
@@ -648,7 +652,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
             Files.createDirectories( repoDir );
         }
 
-        StorageAsset indexDirectory = null;
+        StorageAsset indexDirectory;
 
         // is there configured indexDirectory ?
         if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
@@ -715,7 +719,7 @@ public class MavenIndexManager implements ArchivaIndexManager {
             }
         }
 
-        StorageAsset indexDirectory = null;
+        StorageAsset indexDirectory;
 
         if ( repository.supportsFeature( IndexCreationFeature.class ) )
         {
@@ -837,23 +841,18 @@ public class MavenIndexManager implements ArchivaIndexManager {
         }
 
         @Override
-        public void connect( String id, String url )
-            throws IOException
-        {
+        public void connect( String id, String url ) {
             //no op
         }
 
         @Override
-        public void disconnect( )
-            throws IOException
-        {
+        public void disconnect( ) {
             // no op
         }
 
         @Override
         public InputStream retrieve( String name )
-            throws IOException, FileNotFoundException
-        {
+            throws IOException {
             try
             {
                 log.info( "index update retrieve file, name:{}", name );
index 2698575dbba39f00c2bb2c0b0dc1759a0472c9db..5f73f27eb3a22e957b384966d39ce094a4eed27f 100644 (file)
@@ -208,7 +208,7 @@ public class MavenIndexManagerTest {
         assertNotNull(ctx);
         assertEquals(repository, ctx.getRepository());
         assertEquals("test-repo", ctx.getId());
-        assertEquals(indexPath.toAbsolutePath(), Paths.get(ctx.getPath()).toAbsolutePath());
+        assertEquals(indexPath.toAbsolutePath(), ctx.getPath().getFilePath().toAbsolutePath());
         assertTrue(Files.exists(indexPath));
         List<Path> li = Files.list(indexPath).collect(Collectors.toList());
         assertTrue(li.size()>0);
index aca1c7d4b13316215da0dc0c3a1a46121ef365c2..3448fe0015258905499ee84f3892fabab67424b8 100644 (file)
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-model</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-storage-api</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-xml-tools</artifactId>
index 023b8eaa27ab42e33dec0fa39a81495a894eb0dd..2c81bd9944f6ca4955eb970cc31faf786c4f12ec 100644 (file)
@@ -21,6 +21,7 @@ package org.apache.archiva.maven2.metadata;
 import org.apache.archiva.model.ArchivaRepositoryMetadata;
 import org.apache.archiva.model.Plugin;
 import org.apache.archiva.model.SnapshotVersion;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.xml.XMLException;
 import org.apache.archiva.xml.XMLReader;
 import org.apache.commons.lang.math.NumberUtils;
@@ -64,6 +65,14 @@ public class MavenMetadataReader
 
     private static final Logger log = LoggerFactory.getLogger( MavenMetadataReader.class );
 
+    public static ArchivaRepositoryMetadata read(StorageAsset metadataFile) throws XMLException, IOException {
+        if (metadataFile.isFileBased()) {
+            return read(metadataFile.getFilePath());
+        } else {
+            throw new IOException("StorageAsset is not file based");
+        }
+    }
+
     /**
      * Read and return the {@link org.apache.archiva.model.ArchivaRepositoryMetadata} object from the provided xml file.
      *
@@ -72,8 +81,7 @@ public class MavenMetadataReader
      * @throws XMLException
      */
     public static ArchivaRepositoryMetadata read( Path metadataFile )
-        throws XMLException
-    {
+            throws XMLException, IOException {
 
         XMLReader xml = new XMLReader( "metadata", metadataFile );
         // invoke this to remove namespaces, see MRM-1136
@@ -85,25 +93,9 @@ public class MavenMetadataReader
         metadata.setArtifactId( xml.getElementText( "//metadata/artifactId" ) );
         metadata.setVersion( xml.getElementText( "//metadata/version" ) );
         Date modTime;
-        try
-        {
-            modTime = new Date(Files.getLastModifiedTime( metadataFile ).toMillis( ));
-        }
-        catch ( IOException e )
-        {
-            modTime = new Date();
-            log.error("Could not read modification time of {}", metadataFile);
-        }
+        modTime = new Date(Files.getLastModifiedTime(metadataFile).toMillis());
         metadata.setFileLastModified( modTime );
-        try
-        {
-            metadata.setFileSize( Files.size( metadataFile ) );
-        }
-        catch ( IOException e )
-        {
-            metadata.setFileSize( 0 );
-            log.error("Could not read file size of {}", metadataFile);
-        }
+        metadata.setFileSize( Files.size(metadataFile) );
 
         metadata.setLastUpdated( xml.getElementText( "//metadata/versioning/lastUpdated" ) );
         metadata.setLatestVersion( xml.getElementText( "//metadata/versioning/latest" ) );
index 9c383c37d5979af073361777dd2272ec7a793382..7119c961361d1c6381a6dba12fbec885cb46a709 100644 (file)
@@ -28,7 +28,7 @@ import org.apache.archiva.proxy.ProxyException;
 import org.apache.archiva.proxy.model.NetworkProxy;
 import org.apache.archiva.proxy.model.ProxyConnector;
 import org.apache.archiva.repository.*;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.wagon.ConnectionException;
 import org.apache.maven.wagon.ResourceDoesNotExistException;
@@ -113,7 +113,7 @@ public class MavenRepositoryProxyHandler extends DefaultRepositoryProxyHandler {
      * @throws NotModifiedException
      */
     protected void transferResources( ProxyConnector connector, RemoteRepositoryContent remoteRepository,
-                                      Path tmpResource, Path[] checksumFiles, String url, String remotePath, StorageAsset resource,
+                                      StorageAsset tmpResource, StorageAsset[] checksumFiles, String url, String remotePath, StorageAsset resource,
                                       Path workingDirectory, ManagedRepositoryContent repository )
             throws ProxyException, NotModifiedException {
         Wagon wagon = null;
@@ -153,9 +153,9 @@ public class MavenRepositoryProxyHandler extends DefaultRepositoryProxyHandler {
                 // to
                 // save on connections since md5 is rarely used
                 for (int i=0; i<checksumFiles.length; i++) {
-                    String ext = "."+StringUtils.substringAfterLast( checksumFiles[i].getFileName( ).toString( ), "." );
+                    String ext = "."+StringUtils.substringAfterLast(checksumFiles[i].getName( ), "." );
                     transferChecksum(wagon, remoteRepository, remotePath, repository, resource.getFilePath(), ext,
-                        checksumFiles[i]);
+                        checksumFiles[i].getFilePath());
                 }
             }
         } catch (NotFoundException e) {
@@ -182,9 +182,9 @@ public class MavenRepositoryProxyHandler extends DefaultRepositoryProxyHandler {
 
     protected void transferArtifact(Wagon wagon, RemoteRepositoryContent remoteRepository, String remotePath,
                                     ManagedRepositoryContent repository, Path resource, Path tmpDirectory,
-                                    Path destFile)
+                                    StorageAsset destFile)
             throws ProxyException {
-        transferSimpleFile(wagon, remoteRepository, remotePath, repository, resource, destFile);
+        transferSimpleFile(wagon, remoteRepository, remotePath, repository, resource, destFile.getFilePath());
     }
 
     /**
index 0061fa732c7347328d7ff78d2f970daae917a738..3a9c16bba40bb3334f277d16321e7d02830d1c4c 100644 (file)
@@ -26,6 +26,7 @@ import org.apache.archiva.policies.ChecksumPolicy;
 import org.apache.archiva.policies.ReleasesPolicy;
 import org.apache.archiva.policies.SnapshotsPolicy;
 import org.apache.archiva.policies.urlcache.UrlFailureCache;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.maven.wagon.ResourceDoesNotExistException;
 import org.easymock.EasyMock;
 import org.junit.Test;
@@ -82,7 +83,7 @@ public class CacheFailuresTransferTest
 
         wagonMockControl.replay();
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         wagonMockControl.verify();
 
@@ -92,7 +93,7 @@ public class CacheFailuresTransferTest
         downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
         wagonMockControl.verify();
 
-        assertNotDownloaded( downloadedFile);
+        assertNotDownloaded( downloadedFile.getFilePath());
         assertNoTempFiles( expectedFile );
     }
 
@@ -124,7 +125,7 @@ public class CacheFailuresTransferTest
 
         wagonMockControl.replay();
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         wagonMockControl.verify();
 
@@ -140,7 +141,7 @@ public class CacheFailuresTransferTest
 
         wagonMockControl.verify();
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertNoTempFiles( expectedFile );
     }
 
@@ -168,11 +169,11 @@ public class CacheFailuresTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied2", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.YES, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         // Validate that file actually came from proxied2 (as intended).
         Path proxied2File = Paths.get( REPOPATH_PROXIED2, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied2File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
         assertNoTempFiles( expectedFile );
     }
 
index e9ade0479dc7c83bbbf903212304d1d07ee0b22e..2bf77d2ec5b9d7fd0fdb4905af994505a30ce1de 100644 (file)
@@ -25,6 +25,7 @@ import org.apache.archiva.policies.CachedFailuresPolicy;
 import org.apache.archiva.policies.ChecksumPolicy;
 import org.apache.archiva.policies.ReleasesPolicy;
 import org.apache.archiva.policies.SnapshotsPolicy;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.maven.wagon.ResourceDoesNotExistException;
 import org.easymock.EasyMock;
 import org.junit.Test;
@@ -63,7 +64,7 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, true );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         assertNull( downloadedFile );
     }
@@ -85,10 +86,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar",
                          "e58f30c6a150a2e843552438d18e15cb *get-checksum-both-right-1.0.jar" );
@@ -111,10 +112,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
                          null );
@@ -137,10 +138,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
     }
@@ -162,10 +163,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, null, null );
     }
@@ -187,10 +188,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" );
     }
@@ -212,9 +213,9 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertChecksums( expectedFile, null, null );
     }
 
@@ -235,10 +236,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "4ec20a12dc91557330bd0b39d1805be5e329ae56  get-checksum-both-bad-1.0.jar",
                          "a292491a35925465e693a44809a078b5  get-checksum-both-bad-1.0.jar" );
@@ -261,9 +262,9 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertChecksums( expectedFile, null, null );
     }
 
@@ -284,11 +285,11 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         // This is a success situation. No SHA1 with a Good MD5.
         Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
     }
@@ -310,9 +311,9 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertChecksums( expectedFile, null, null );
     }
 
@@ -333,10 +334,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
                          "invalid checksum file" );
@@ -360,10 +361,10 @@ public class ChecksumTransferTest
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
                          "c35f3b76268b73a4ba617f6f275c49ab  get-checksum-sha1-bad-md5-1.0.jar" );
@@ -386,10 +387,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "71f7dc3f72053a3f2d9fdd6fef9db055ef957ffb  get-checksum-md5-only-1.0.jar",
                          "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
@@ -412,10 +413,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "1f12821c5e43e1a0b76b9564a6ddb0548ccb9486  get-default-layout-1.0.jar",
                          "3f7341545f21226b6f49a3c2704cb9be  get-default-layout-1.0.jar" );
@@ -452,7 +453,7 @@ public class ChecksumTransferTest
 
         wagonMockControl.replay();
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         wagonMockControl.verify();
 
@@ -465,7 +466,7 @@ public class ChecksumTransferTest
 
         // Test results.
         Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar",
                          null );
@@ -489,10 +490,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get( REPOPATH_PROXIED1, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         // There are no hashcodes on the proxy side to download, hence the local ones should remain invalid.
         assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" );
@@ -516,9 +517,9 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FAIL, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertNoTempFiles( expectedFile );
         // There are no hashcodes on the proxy side to download.
         // The FAIL policy will delete the checksums as bad.
@@ -544,10 +545,10 @@ public class ChecksumTransferTest
         saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
         assertChecksums( expectedFile, "96a08dc80a108cba8efd3b20aec91b32a0b2cbd4  get-bad-local-checksum-1.0.jar",
                          "46fdd6ca55bf1d7a7eb0c858f41e0ccd  get-bad-local-checksum-1.0.jar" );
index cef7501b4da4b42f8dcb4ef0dce84562ae544fd7..511ea16dabdd159b3c1e95ce2a4e739c1bb74a36 100644 (file)
@@ -27,6 +27,7 @@ import org.apache.archiva.policies.ProxyDownloadException;
 import org.apache.archiva.policies.ReleasesPolicy;
 import org.apache.archiva.policies.SnapshotsPolicy;
 import org.apache.archiva.repository.LayoutException;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.maven.wagon.ResourceDoesNotExistException;
 import org.apache.maven.wagon.TransferFailedException;
 import org.apache.maven.wagon.authorization.AuthorizationException;
@@ -598,7 +599,7 @@ public class ErrorHandlingTest
         wagonMockControl.replay();
 
         // Attempt the proxy fetch.
-        Path downloadedFile = null;
+        StorageAsset downloadedFile = null;
         try
         {
             downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,
@@ -616,33 +617,33 @@ public class ErrorHandlingTest
 
         wagonMockControl.verify();
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
     }
 
     private void confirmSuccess( String path, Path expectedFile, String basedir )
         throws Exception
     {
-        Path downloadedFile = performDownload( path );
+        StorageAsset downloadedFile = performDownload( path );
 
         Path proxied1File = Paths.get( basedir, path );
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
     }
 
     private void confirmNotDownloadedNoError( String path )
         throws Exception
     {
-        Path downloadedFile = performDownload( path );
+        StorageAsset downloadedFile = performDownload( path );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
     }
 
-    private Path performDownload( String path )
+    private StorageAsset performDownload( String path )
         throws ProxyDownloadException, LayoutException
     {
         wagonMockControl.replay();
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository,
                                                              managedDefaultRepository.toArtifactReference( path ) );
 
         wagonMockControl.verify();
index 5d2c93b4f6767f72d8970abfe34ca26a1c4ac20d..3df014c1cb8bce28b48b65a0e872834ae38fc2ab 100644 (file)
@@ -32,6 +32,7 @@ import org.apache.archiva.policies.ReleasesPolicy;
 import org.apache.archiva.policies.SnapshotsPolicy;
 import org.apache.archiva.proxy.model.RepositoryProxyHandler;
 import org.apache.archiva.repository.*;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
 import org.apache.commons.io.FileUtils;
 import org.assertj.core.api.Assertions;
@@ -208,18 +209,18 @@ public class HttpProxyTransferTest
         ArtifactReference artifact = managedDefaultRepository.toArtifactReference( path );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path sourceFile = Paths.get( PROXIED_BASEDIR, path );
         assertNotNull( "Expected File should not be null.", expectedFile );
         assertNotNull( "Actual File should not be null.", downloadedFile );
 
-        assertTrue( "Check actual file exists.", Files.exists(downloadedFile));
-        assertTrue( "Check filename path is appropriate.", Files.isSameFile( expectedFile, downloadedFile));
-        assertTrue( "Check file path matches.", Files.isSameFile( expectedFile, downloadedFile));
+        assertTrue( "Check actual file exists.", Files.exists(downloadedFile.getFilePath()));
+        assertTrue( "Check filename path is appropriate.", Files.isSameFile( expectedFile, downloadedFile.getFilePath()));
+        assertTrue( "Check file path matches.", Files.isSameFile( expectedFile, downloadedFile.getFilePath()));
 
         String expectedContents = FileUtils.readFileToString( sourceFile.toFile(), Charset.defaultCharset() );
-        String actualContents = FileUtils.readFileToString( downloadedFile.toFile(), Charset.defaultCharset() );
+        String actualContents = FileUtils.readFileToString( downloadedFile.getFilePath().toFile(), Charset.defaultCharset() );
         assertEquals( "Check file contents.", expectedContents, actualContents );
 
         Assertions.assertThat( System.getProperty( "http.proxyHost" , "") ).isEmpty();
index 01759437413e1002b49a49537ee94c702d9eecac..03bfbceaf153958bea16d23cca804c742b13ca23 100644 (file)
@@ -24,6 +24,7 @@ import org.apache.archiva.policies.CachedFailuresPolicy;
 import org.apache.archiva.policies.ChecksumPolicy;
 import org.apache.archiva.policies.ReleasesPolicy;
 import org.apache.archiva.policies.SnapshotsPolicy;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.wagon.ResourceDoesNotExistException;
@@ -64,7 +65,7 @@ public class ManagedDefaultTransferTest
                        CachedFailuresPolicy.NO, true );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
         assertNull( "File should not have been downloaded", downloadedFile );
     }
 
@@ -86,10 +87,10 @@ public class ManagedDefaultTransferTest
                        CachedFailuresPolicy.NO, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path sourceFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, sourceFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), sourceFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -110,13 +111,13 @@ public class ManagedDefaultTransferTest
                        CachedFailuresPolicy.NO, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
 
         Path sourceFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, sourceFile );
-        assertFalse( Files.exists( downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".sha1" )) );
-        assertFalse( Files.exists(downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".md5" ) ));
-        assertFalse( Files.exists( downloadedFile.getParent().resolve(downloadedFile.getFileName() + ".asc" ) ));
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), sourceFile );
+        assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".sha1" )) );
+        assertFalse( Files.exists(downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".md5" ) ));
+        assertFalse( Files.exists( downloadedFile.getParent().getFilePath().resolve(downloadedFile.getName() + ".asc" ) ));
         assertNoTempFiles( expectedFile );
     }
 
@@ -145,9 +146,9 @@ public class ManagedDefaultTransferTest
                        CachedFailuresPolicy.NO, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertFileEquals( expectedFile, downloadedFile, expectedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), expectedFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -179,9 +180,9 @@ public class ManagedDefaultTransferTest
                        CachedFailuresPolicy.NO, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, path );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertNotModified( expectedFile, originalModificationTime );
         assertNoTempFiles( expectedFile );
     }
@@ -225,9 +226,9 @@ public class ManagedDefaultTransferTest
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertNotModified( expectedFile, originalModificationTime );
         assertNoTempFiles( expectedFile );
     }
@@ -270,10 +271,10 @@ public class ManagedDefaultTransferTest
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxiedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -302,10 +303,10 @@ public class ManagedDefaultTransferTest
                        CachedFailuresPolicy.NO, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxiedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -326,15 +327,15 @@ public class ManagedDefaultTransferTest
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied1File = Paths.get(REPOPATH_PROXIED1, path);
         Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied1File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied1File );
         assertNoTempFiles( expectedFile );
 
         // TODO: is this check even needed if it passes above? 
-        String actualContents = FileUtils.readFileToString( downloadedFile.toFile(), Charset.defaultCharset() );
+        String actualContents = FileUtils.readFileToString( downloadedFile.getFilePath().toFile(), Charset.defaultCharset() );
         String badContents = FileUtils.readFileToString( proxied2File.toFile(), Charset.defaultCharset() );
         assertFalse( "Downloaded file contents should not be that of proxy 2",
                      StringUtils.equals( actualContents, badContents ) );
@@ -357,10 +358,10 @@ public class ManagedDefaultTransferTest
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied2File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
         assertNoTempFiles( expectedFile );
     }
 
@@ -381,7 +382,7 @@ public class ManagedDefaultTransferTest
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         assertNull( "File returned was: " + downloadedFile + "; should have got a not found exception",
                     downloadedFile );
@@ -412,12 +413,12 @@ public class ManagedDefaultTransferTest
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, false );
 
         // Attempt the proxy fetch.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         wagonMockControl.verify();
 
         Path proxied2File = Paths.get(REPOPATH_PROXIED2, path);
-        assertFileEquals( expectedFile, downloadedFile, proxied2File );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxied2File );
         assertNoTempFiles( expectedFile );
     }
 
@@ -451,9 +452,9 @@ public class ManagedDefaultTransferTest
 
         wagonMockControl.replay();
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
 
         wagonMockControl.verify();
         assertNoTempFiles( expectedFile );
index 23bfb8503d59ec94f0f263283644938cf6ee4042..dc3681aaa722f668852306919cae2b00038677d5 100644 (file)
@@ -19,6 +19,7 @@ package org.apache.archiva.proxy;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.utils.VersionUtil;
 import org.apache.archiva.configuration.ProxyConnectorConfiguration;
 import org.apache.archiva.maven2.metadata.MavenMetadataReader;
@@ -34,6 +35,8 @@ import org.apache.archiva.policies.SnapshotsPolicy;
 import org.apache.archiva.repository.metadata.MetadataTools;
 import org.apache.archiva.repository.metadata.RepositoryMetadataException;
 import org.apache.archiva.repository.metadata.RepositoryMetadataWriter;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.wagon.TransferFailedException;
 import org.custommonkey.xmlunit.DetailedDiff;
@@ -125,7 +128,7 @@ public class MetadataTransferTest
 
         ProjectReference metadata = createProjectReference( requestedResource );
 
-        Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
+        StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
                                                                      managedDefaultRepository.toMetadataPath(
                                                                          metadata ) ).getFile();
 
@@ -988,7 +991,7 @@ public class MetadataTransferTest
 
         ProjectReference metadata = createProjectReference( requestedResource );
 
-        Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
+        StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
                                                                      managedDefaultRepository.toMetadataPath(
                                                                          metadata ) ).getFile();
 
@@ -1014,7 +1017,7 @@ public class MetadataTransferTest
         Path expectedFile = managedDefaultDir.resolve(requestedResource);
         ProjectReference metadata = createProjectReference( requestedResource );
 
-        Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
+        StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
                                                                      managedDefaultRepository.toMetadataPath(
                                                                          metadata ) ).getFile();
 
@@ -1035,7 +1038,7 @@ public class MetadataTransferTest
 
         VersionedReference metadata = createVersionedReference( requestedResource );
 
-        Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
+        StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
                                                                      managedDefaultRepository.toMetadataPath(
                                                                          metadata ) ).getFile();
 
@@ -1061,7 +1064,7 @@ public class MetadataTransferTest
         Path expectedFile = managedDefaultDir.resolve(requestedResource);
         VersionedReference metadata = createVersionedReference( requestedResource );
 
-        Path downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
+        StorageAsset downloadedFile = proxyHandler.fetchMetadataFromProxies( managedDefaultRepository,
                                                                      managedDefaultRepository.toMetadataPath(
                                                                          metadata ) ).getFile();
 
@@ -1090,7 +1093,9 @@ public class MetadataTransferTest
         assertTrue( "Actual file exists.", Files.exists(actualFile) );
 
         StringWriter actualContents = new StringWriter();
-        ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( actualFile );
+        FilesystemStorage fsStorage = new FilesystemStorage(actualFile.getParent(), new DefaultFileLockManager());
+        StorageAsset actualFileAsset = fsStorage.getAsset(actualFile.getFileName().toString());
+        ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( actualFileAsset );
         RepositoryMetadataWriter.write( metadata, actualContents );
 
         DetailedDiff detailedDiff = new DetailedDiff( new Diff( expectedMetadataXml, actualContents.toString() ) );
index 9ab0585bf6260e94ca15ec03ea97ead922194667..ace9e584a49e2cdef881fe79b01081fc6e51e33c 100644 (file)
@@ -24,6 +24,7 @@ import org.apache.archiva.policies.CachedFailuresPolicy;
 import org.apache.archiva.policies.ChecksumPolicy;
 import org.apache.archiva.policies.ReleasesPolicy;
 import org.apache.archiva.policies.SnapshotsPolicy;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.junit.Test;
 
 import java.nio.file.Files;
@@ -59,8 +60,8 @@ public class SnapshotTransferTest
         // Configure Connector (usually done within archiva.xml configuration)
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
-        assertNotDownloaded( downloadedFile );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertNoTempFiles( expectedFile );
     }
 
@@ -80,10 +81,10 @@ public class SnapshotTransferTest
         // Configure Connector (usually done within archiva.xml configuration)
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxiedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -103,10 +104,10 @@ public class SnapshotTransferTest
         // Configure Connector (usually done within archiva.xml configuration)
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxiedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -128,10 +129,10 @@ public class SnapshotTransferTest
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false );
 
         // Attempt to download.
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         // Should not have downloaded as managed is newer than remote.
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertNoTempFiles( expectedFile );
     }
 
@@ -220,9 +221,9 @@ public class SnapshotTransferTest
         // Configure Connector (usually done within archiva.xml configuration)
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertFileEquals( expectedFile, downloadedFile, proxiedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -244,9 +245,9 @@ public class SnapshotTransferTest
         // Configure Connector (usually done within archiva.xml configuration)
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
-        assertNotDownloaded( downloadedFile );
+        assertNotDownloaded( downloadedFile.getFilePath() );
         assertNotModified( expectedFile, expectedTimestamp );
         assertNoTempFiles( expectedFile );
     }
@@ -270,10 +271,10 @@ public class SnapshotTransferTest
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
                        SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.YES , false);
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxiedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -293,10 +294,10 @@ public class SnapshotTransferTest
         // Configure Connector (usually done within archiva.xml configuration)
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxiedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
         assertNoTempFiles( expectedFile );
     }
 
@@ -320,10 +321,10 @@ public class SnapshotTransferTest
         // Configure Connector (usually done within archiva.xml configuration)
         saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, false);
 
-        Path downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
+        StorageAsset downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
 
         Path proxiedFile = Paths.get(REPOPATH_PROXIED1, path);
-        assertFileEquals( expectedFile, downloadedFile, proxiedFile );
+        assertFileEquals( expectedFile, downloadedFile.getFilePath(), proxiedFile );
         assertNoTempFiles( expectedFile );
     }
 }
index 61ad3d3a0495050dc29678d9ab64a780e74e2355..706511c68b9160dd95a4d12f38ad4cc4740543a7 100644 (file)
@@ -19,6 +19,7 @@ package org.apache.archiva.repository.mock;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.utils.VersionUtil;
 import org.apache.archiva.metadata.model.ArtifactMetadata;
 import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
@@ -27,10 +28,12 @@ import org.apache.archiva.model.ArtifactReference;
 import org.apache.archiva.model.ProjectReference;
 import org.apache.archiva.model.VersionedReference;
 import org.apache.archiva.repository.*;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 import org.springframework.stereotype.Service;
 
+import java.io.IOException;
 import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.Map;
@@ -50,6 +53,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
 
 
     private ManagedRepository repository;
+    private FilesystemStorage fsStorage;
 
     ManagedRepositoryContentMock(ManagedRepository repo) {
         this.repository = repo;
@@ -94,7 +98,18 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
     @Override
     public String getRepoRoot( )
     {
-        return Paths.get("", "target", "test-repository", "managed").toString();
+        return getRepoRootAsset().getFilePath().toString();
+    }
+
+    private StorageAsset getRepoRootAsset() {
+        if (fsStorage==null) {
+            try {
+                fsStorage = new FilesystemStorage(Paths.get("", "target", "test-repository", "managed"), new DefaultFileLockManager());
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
+        return fsStorage.getAsset("");
     }
 
     @Override
@@ -331,7 +346,7 @@ public class ManagedRepositoryContentMock implements ManagedRepositoryContent
     @Override
     public StorageAsset toFile( ArtifactReference reference )
     {
-        return Paths.get(getRepoRoot(), refs.get(reference));
+        return getRepoRootAsset().resolve( refs.get(reference));
     }
 
     @Override
index 4fee9ee609785779769a0b71ae282736e9609058..05ad9a011faf608165d68d4d59e0a4cb07167071 100644 (file)
@@ -36,6 +36,7 @@ import org.apache.archiva.repository.RemoteRepository;
 import org.apache.archiva.repository.RepositoryRegistry;
 import org.apache.archiva.repository.maven2.MavenSystemManager;
 import org.apache.archiva.repository.metadata.MetadataTools;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.xml.XMLException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.artifact.Artifact;
@@ -55,8 +56,7 @@ import org.springframework.stereotype.Service;
 import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 import javax.inject.Named;
-import java.nio.file.Files;
-import java.nio.file.Path;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -233,22 +233,22 @@ public class Maven3DependencyTreeBuilder
         for ( String repoId : repositoryIds )
         {
             ManagedRepository managedRepo = repositoryRegistry.getManagedRepository(repoId);
-            Path repoDir = managedRepo.getAsset("").getFilePath();
+            StorageAsset repoDir = managedRepo.getAsset("");
 
-            Path file = pathTranslator.toFile( repoDir, projectArtifact.getGroupId(), projectArtifact.getArtifactId(),
+            StorageAsset file = pathTranslator.toFile( repoDir, projectArtifact.getGroupId(), projectArtifact.getArtifactId(),
                                                projectArtifact.getBaseVersion(),
                                                projectArtifact.getArtifactId() + "-" + projectArtifact.getVersion()
                                                    + ".pom" );
 
-            if ( Files.exists(file) )
+            if ( file.exists() )
             {
                 return managedRepo;
             }
             // try with snapshot version
             if ( StringUtils.endsWith( projectArtifact.getBaseVersion(), VersionUtil.SNAPSHOT ) )
             {
-                Path metadataFile = file.getParent().resolve( MetadataTools.MAVEN_METADATA );
-                if ( Files.exists(metadataFile) )
+                StorageAsset metadataFile = file.getParent().resolve( MetadataTools.MAVEN_METADATA );
+                if ( metadataFile.exists() )
                 {
                     try
                     {
@@ -262,14 +262,14 @@ public class Maven3DependencyTreeBuilder
                                                     "-" + VersionUtil.SNAPSHOT ) ).append( '-' ).append(
                                 timeStamp ).append( '-' ).append( Integer.toString( buildNumber ) ).append(
                                 ".pom" ).toString();
-                        Path timeStampFile = file.getParent().resolve( timeStampFileName );
+                        StorageAsset timeStampFile = file.getParent().resolve( timeStampFileName );
                         log.debug( "try to find timestamped snapshot version file: {}", timeStampFile);
-                        if ( Files.exists(timeStampFile) )
+                        if ( timeStampFile.exists() )
                         {
                             return managedRepo;
                         }
                     }
-                    catch ( XMLException e )
+                    catch (XMLException | IOException e )
                     {
                         log.warn( "skip fail to find timestamped snapshot pom: {}", e.getMessage() );
                     }
index b910368091c8d61b47e08859fbb16c65ccd44e9c..7d728f9fc5777a65477e0a2b4e6f9d5d7ef0bd6b 100644 (file)
@@ -23,6 +23,7 @@ import org.apache.archiva.common.utils.VersionUtil;
 import org.apache.archiva.metadata.model.ArtifactMetadata;
 import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
 import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.stereotype.Service;
@@ -78,13 +79,13 @@ public class Maven2RepositoryPathTranslator
     }
 
     @Override
-    public Path toFile(Path basedir, String namespace, String projectId, String projectVersion, String filename )
+    public StorageAsset toFile(StorageAsset basedir, String namespace, String projectId, String projectVersion, String filename )
     {
         return basedir.resolve( toPath( namespace, projectId, projectVersion, filename ) );
     }
 
     @Override
-    public Path toFile( Path basedir, String namespace, String projectId, String projectVersion )
+    public StorageAsset toFile( StorageAsset basedir, String namespace, String projectId, String projectVersion )
     {
         return basedir.resolve( toPath( namespace, projectId, projectVersion ) );
     }
@@ -148,13 +149,13 @@ public class Maven2RepositoryPathTranslator
     }
 
     @Override
-    public Path toFile( Path basedir, String namespace, String projectId )
+    public StorageAsset toFile( StorageAsset basedir, String namespace, String projectId )
     {
         return basedir.resolve( toPath( namespace, projectId ) );
     }
 
     @Override
-    public Path toFile( Path basedir, String namespace )
+    public StorageAsset toFile( StorageAsset basedir, String namespace )
     {
         return basedir.resolve( toPath( namespace ) );
     }
index e754a72cf9c33387d6b3dbdd9aa51b1a165737d3..6d788c06a9c1c8dc339edabf0c209fac8e8417c4 100644 (file)
@@ -48,6 +48,7 @@ import org.apache.archiva.proxy.model.RepositoryProxyHandler;
 import org.apache.archiva.repository.*;
 import org.apache.archiva.repository.content.PathParser;
 import org.apache.archiva.repository.maven2.MavenSystemManager;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.xml.XMLException;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
@@ -80,11 +81,9 @@ import javax.inject.Named;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.Reader;
+import java.nio.channels.Channels;
 import java.nio.charset.Charset;
-import java.nio.file.Files;
 import java.nio.file.NoSuchFileException;
-import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -95,7 +94,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 // import java.io.FileNotFoundException;
 
@@ -187,9 +185,9 @@ public class Maven2RepositoryStorage
                 }
             }
         }
-        Path basedir = Paths.get(managedRepository.getLocation());
+        StorageAsset basedir = managedRepository.getAsset("");
         if (VersionUtil.isSnapshot(artifactVersion)) {
-            Path metadataFile = pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(),
+            StorageAsset metadataFile = pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(),
                     readMetadataRequest.getProjectId(), artifactVersion,
                     METADATA_FILENAME);
             try {
@@ -203,7 +201,7 @@ public class Maven2RepositoryStorage
                     artifactVersion =
                             artifactVersion + snapshotVersion.getTimestamp() + "-" + snapshotVersion.getBuildNumber();
                 }
-            } catch (XMLException e) {
+            } catch (XMLException | IOException e) {
                 // unable to parse metadata - LOGGER it, and continue with the version as the original SNAPSHOT version
                 LOGGER.warn("Invalid metadata: {} - {}", metadataFile, e.getMessage());
             }
@@ -211,14 +209,14 @@ public class Maven2RepositoryStorage
 
         // TODO: won't work well with some other layouts, might need to convert artifact parts to ID by path translator
         String id = readMetadataRequest.getProjectId() + "-" + artifactVersion + ".pom";
-        Path file =
+        StorageAsset file =
                 pathTranslator.toFile(basedir, readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(),
                         readMetadataRequest.getProjectVersion(), id);
 
-        if (!Files.exists(file)) {
+        if (!file.exists()) {
             // metadata could not be resolved
             throw new RepositoryStorageMetadataNotFoundException(
-                    "The artifact's POM file '" + file.toAbsolutePath() + "' was missing");
+                    "The artifact's POM file '" + file.getPath() + "' was missing");
         }
 
         // TODO: this is a workaround until we can properly resolve using proxies as well - this doesn't cache
@@ -254,7 +252,7 @@ public class Maven2RepositoryStorage
         }
 
         ModelBuildingRequest req =
-                new DefaultModelBuildingRequest().setProcessPlugins(false).setPomFile(file.toFile()).setTwoPhaseBuilding(
+                new DefaultModelBuildingRequest().setProcessPlugins(false).setPomFile(file.getFilePath().toFile()).setTwoPhaseBuilding(
                         false).setValidationLevel(ModelBuildingRequest.VALIDATION_LEVEL_MINIMAL);
 
         //MRM-1607. olamy this will resolve jdk profiles on the current running archiva jvm
@@ -460,67 +458,52 @@ public class Maven2RepositoryStorage
     @Override
     public Collection<String> listRootNamespaces(String repoId, Filter<String> filter)
             throws RepositoryStorageRuntimeException {
-        Path dir = getRepositoryBasedir(repoId);
+        StorageAsset dir = getRepositoryBasedir(repoId);
 
         return getSortedFiles(dir, filter);
     }
 
-    private static Collection<String> getSortedFiles(Path dir, Filter<String> filter) {
+    private static Collection<String> getSortedFiles(StorageAsset dir, Filter<String> filter) {
 
-        try (Stream<Path> stream = Files.list(dir)) {
-            final Predicate<Path> dFilter = new DirectoryFilter(filter);
-            return stream.filter(Files::isDirectory)
+            final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
+            return dir.list().stream().filter(f -> f.isContainer())
                     .filter(dFilter)
-                    .map(path -> path.getFileName().toString())
+                    .map(path -> path.getName().toString())
                     .sorted().collect(Collectors.toList());
 
-        } catch (IOException e) {
-            LOGGER.error("Could not read directory list {}: {}", dir, e.getMessage(), e);
-            return Collections.emptyList();
-        }
     }
 
-    private Path getRepositoryBasedir(String repoId)
+    private StorageAsset getRepositoryBasedir(String repoId)
             throws RepositoryStorageRuntimeException {
         ManagedRepository repositoryConfiguration = repositoryRegistry.getManagedRepository(repoId);
 
-        return Paths.get(repositoryConfiguration.getLocation());
+        return repositoryConfiguration.getAsset("");
     }
 
     @Override
     public Collection<String> listNamespaces(String repoId, String namespace, Filter<String> filter)
             throws RepositoryStorageRuntimeException {
-        Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
-        if (!(Files.exists(dir) && Files.isDirectory(dir))) {
+        StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
+        if (!(dir.exists()) && !dir.isContainer()) {
             return Collections.emptyList();
         }
         // scan all the directories which are potential namespaces. Any directories known to be projects are excluded
-        Predicate<Path> dFilter = new DirectoryFilter(filter);
-        try (Stream<Path> stream = Files.list(dir)) {
-            return stream.filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getFileName().toString())
+        Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
+            return dir.list().stream().filter(dFilter).filter(path -> !isProject(path, filter)).map(path -> path.getName().toString())
                     .sorted().collect(Collectors.toList());
-        } catch (IOException e) {
-            LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
-            return Collections.emptyList();
-        }
     }
 
     @Override
     public Collection<String> listProjects(String repoId, String namespace, Filter<String> filter)
             throws RepositoryStorageRuntimeException {
-        Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
-        if (!(Files.exists(dir) && Files.isDirectory(dir))) {
+        StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace);
+        if (!(dir.exists() && dir.isContainer())) {
             return Collections.emptyList();
         }
         // scan all directories in the namespace, and only include those that are known to be projects
-        final Predicate<Path> dFilter = new DirectoryFilter(filter);
-        try (Stream<Path> stream = Files.list(dir)) {
-            return stream.filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getFileName().toString())
+        final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
+            return dir.list().stream().filter(dFilter).filter(path -> isProject(path, filter)).map(path -> path.getName().toString())
                     .sorted().collect(Collectors.toList());
-        } catch (IOException e) {
-            LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
-            return Collections.emptyList();
-        }
 
     }
 
@@ -528,8 +511,8 @@ public class Maven2RepositoryStorage
     public Collection<String> listProjectVersions(String repoId, String namespace, String projectId,
                                                   Filter<String> filter)
             throws RepositoryStorageRuntimeException {
-        Path dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace, projectId);
-        if (!(Files.exists(dir) && Files.isDirectory(dir))) {
+        StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(repoId), namespace, projectId);
+        if (!(dir.exists() && dir.isContainer())) {
             return Collections.emptyList();
         }
 
@@ -540,18 +523,17 @@ public class Maven2RepositoryStorage
     @Override
     public Collection<ArtifactMetadata> readArtifactsMetadata(ReadMetadataRequest readMetadataRequest)
             throws RepositoryStorageRuntimeException {
-        Path dir = pathTranslator.toFile(getRepositoryBasedir(readMetadataRequest.getRepositoryId()),
+        StorageAsset dir = pathTranslator.toFile(getRepositoryBasedir(readMetadataRequest.getRepositoryId()),
                 readMetadataRequest.getNamespace(), readMetadataRequest.getProjectId(),
                 readMetadataRequest.getProjectVersion());
-        if (!(Files.exists(dir) && Files.isDirectory(dir))) {
+        if (!(dir.exists() && dir.isContainer())) {
             return Collections.emptyList();
         }
 
         // all files that are not metadata and not a checksum / signature are considered artifacts
-        final Predicate<Path> dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter());
-        try (Stream<Path> stream = Files.list(dir)) {
+        final Predicate<StorageAsset> dFilter = new ArtifactDirectoryFilter(readMetadataRequest.getFilter());
             // Returns a map TRUE -> (success values), FALSE -> (Exceptions)
-            Map<Boolean, List<Try<ArtifactMetadata>>> result = stream.filter(dFilter).map(path -> {
+            Map<Boolean, List<Try<ArtifactMetadata>>> result = dir.list().stream().filter(dFilter).map(path -> {
                         try {
                             return Try.success(getArtifactFromFile(readMetadataRequest.getRepositoryId(), readMetadataRequest.getNamespace(),
                                     readMetadataRequest.getProjectId(), readMetadataRequest.getProjectVersion(),
@@ -573,10 +555,6 @@ public class Maven2RepositoryStorage
                 }
                 return result.get(Boolean.TRUE).stream().map(tr -> tr.get()).collect(Collectors.toList());
             }
-        } catch (IOException e) {
-            LOGGER.error("Could not read directory {}: {}", dir, e.getMessage(), e);
-        }
-        return Collections.emptyList();
 
     }
 
@@ -595,9 +573,9 @@ public class Maven2RepositoryStorage
     }
 
     private ArtifactMetadata getArtifactFromFile(String repoId, String namespace, String projectId,
-                                                 String projectVersion, Path file) throws IOException {
+                                                 String projectVersion, StorageAsset file) throws IOException {
         ArtifactMetadata metadata =
-                pathTranslator.getArtifactFromId(repoId, namespace, projectId, projectVersion, file.getFileName().toString());
+                pathTranslator.getArtifactFromId(repoId, namespace, projectId, projectVersion, file.getName());
 
         populateArtifactMetadataFromFile(metadata, file);
 
@@ -629,17 +607,17 @@ public class Maven2RepositoryStorage
         proxyHandler.fetchFromProxies(managedRepository, pomReference);
 
         // Open and read the POM from the managed repo
-        Path pom = managedRepository.toFile(pomReference);
+        StorageAsset pom = managedRepository.toFile(pomReference);
 
-        if (!Files.exists(pom)) {
+        if (!pom.exists()) {
             return;
         }
 
         try {
             // MavenXpp3Reader leaves the file open, so we need to close it ourselves.
 
-            Model model = null;
-            try (Reader reader = Files.newBufferedReader(pom, Charset.defaultCharset())) {
+            Model model;
+            try (Reader reader = Channels.newReader(pom.getReadChannel(), Charset.defaultCharset().name())) {
                 model = MAVEN_XPP_3_READER.read(reader);
             }
 
@@ -708,7 +686,7 @@ public class Maven2RepositoryStorage
 
     @Override
     public String getFilePathWithVersion(final String requestPath, ManagedRepositoryContent managedRepositoryContent)
-            throws XMLException, RelocationException {
+            throws RelocationException, XMLException, IOException {
 
         if (StringUtils.endsWith(requestPath, METADATA_FILENAME)) {
             return getFilePath(requestPath, managedRepositoryContent.getRepository());
@@ -725,12 +703,12 @@ public class Maven2RepositoryStorage
 
         if (StringUtils.endsWith(artifactReference.getVersion(), VersionUtil.SNAPSHOT)) {
             // read maven metadata to get last timestamp
-            Path metadataDir = Paths.get(managedRepositoryContent.getRepoRoot(), filePath).getParent();
-            if (!Files.exists(metadataDir)) {
+            StorageAsset metadataDir = managedRepositoryContent.getRepository().getAsset( filePath).getParent();
+            if (!metadataDir.exists()) {
                 return filePath;
             }
-            Path metadataFile = metadataDir.resolve(METADATA_FILENAME);
-            if (!Files.exists(metadataFile)) {
+            StorageAsset metadataFile = metadataDir.resolve(METADATA_FILENAME);
+            if (!metadataFile.exists()) {
                 return filePath;
             }
             ArchivaRepositoryMetadata archivaRepositoryMetadata = MavenMetadataReader.read(metadataFile);
@@ -788,10 +766,10 @@ public class Maven2RepositoryStorage
         return joinedString;
     }
 
-    private static void populateArtifactMetadataFromFile(ArtifactMetadata metadata, Path file) throws IOException {
+    private static void populateArtifactMetadataFromFile(ArtifactMetadata metadata, StorageAsset file) throws IOException {
         metadata.setWhenGathered(new Date());
-        metadata.setFileLastModified(Files.getLastModifiedTime(file).toMillis());
-        ChecksummedFile checksummedFile = new ChecksummedFile(file);
+        metadata.setFileLastModified(file.getModificationTime().toEpochMilli());
+        ChecksummedFile checksummedFile = new ChecksummedFile(file.getFilePath());
         try {
             metadata.setMd5(checksummedFile.calculateChecksum(ChecksumAlgorithm.MD5));
         } catch (IOException e) {
@@ -802,52 +780,43 @@ public class Maven2RepositoryStorage
         } catch (IOException e) {
             LOGGER.error("Unable to checksum file {}: {},SHA1", file, e.getMessage());
         }
-        metadata.setSize(Files.size(file));
+        metadata.setSize(file.getSize());
     }
 
-    private boolean isProject(Path dir, Filter<String> filter) {
+    private boolean isProject(StorageAsset dir, Filter<String> filter) {
         // scan directories for a valid project version subdirectory, meaning this must be a project directory
-        final Predicate<Path> dFilter = new DirectoryFilter(filter);
-        try (Stream<Path> stream = Files.list(dir)) {
-            boolean projFound = stream.filter(dFilter)
+        final Predicate<StorageAsset> dFilter = new DirectoryFilter(filter);
+            boolean projFound =  dir.list().stream().filter(dFilter)
                     .anyMatch(path -> isProjectVersion(path));
             if (projFound) {
                 return true;
             }
-        } catch (IOException e) {
-            LOGGER.error("Could not read directory list {}: {}", dir, e.getMessage(), e);
-        }
 
         // if a metadata file is present, check if this is the "artifactId" directory, marking it as a project
         ArchivaRepositoryMetadata metadata = readMetadata(dir);
-        if (metadata != null && dir.getFileName().toString().equals(metadata.getArtifactId())) {
+        if (metadata != null && dir.getName().toString().equals(metadata.getArtifactId())) {
             return true;
         }
 
         return false;
     }
 
-    private boolean isProjectVersion(Path dir) {
-        final String artifactId = dir.getParent().getFileName().toString();
-        final String projectVersion = dir.getFileName().toString();
+    private boolean isProjectVersion(StorageAsset dir) {
+        final String artifactId = dir.getParent().getName();
+        final String projectVersion = dir.getName();
 
         // check if there is a POM artifact file to ensure it is a version directory
 
-        Predicate<Path> filter;
+        Predicate<StorageAsset> filter;
         if (VersionUtil.isSnapshot(projectVersion)) {
             filter = new PomFilenameFilter(artifactId, projectVersion);
         } else {
             final String pomFile = artifactId + "-" + projectVersion + ".pom";
             filter = new PomFileFilter(pomFile);
         }
-        try (Stream<Path> stream = Files.list(dir)) {
-            if (stream.filter(Files::isRegularFile).anyMatch(filter)) {
+            if (dir.list().stream().filter(f -> !f.isContainer()).anyMatch(filter)) {
                 return true;
             }
-        } catch (IOException e) {
-            LOGGER.error("Could not list directory {}: {}", dir, e.getMessage(), e);
-        }
-
         // if a metadata file is present, check if this is the "version" directory, marking it as a project version
         ArchivaRepositoryMetadata metadata = readMetadata(dir);
         if (metadata != null && projectVersion.equals(metadata.getVersion())) {
@@ -857,13 +826,13 @@ public class Maven2RepositoryStorage
         return false;
     }
 
-    private ArchivaRepositoryMetadata readMetadata(Path directory) {
+    private ArchivaRepositoryMetadata readMetadata(StorageAsset directory) {
         ArchivaRepositoryMetadata metadata = null;
-        Path metadataFile = directory.resolve(METADATA_FILENAME);
-        if (Files.exists(metadataFile)) {
+        StorageAsset metadataFile = directory.resolve(METADATA_FILENAME);
+        if (metadataFile.exists()) {
             try {
                 metadata = MavenMetadataReader.read(metadataFile);
-            } catch (XMLException e) {
+            } catch (XMLException | IOException e) {
                 // ignore missing or invalid metadata
             }
         }
@@ -871,7 +840,7 @@ public class Maven2RepositoryStorage
     }
 
     private static class DirectoryFilter
-            implements Predicate<Path> {
+            implements Predicate<StorageAsset> {
         private final Filter<String> filter;
 
         public DirectoryFilter(Filter<String> filter) {
@@ -879,13 +848,13 @@ public class Maven2RepositoryStorage
         }
 
         @Override
-        public boolean test(Path dir) {
-            final String name = dir.getFileName().toString();
+        public boolean test(StorageAsset dir) {
+            final String name = dir.getName();
             if (!filter.accept(name)) {
                 return false;
             } else if (name.startsWith(".")) {
                 return false;
-            } else if (!Files.isDirectory(dir)) {
+            } else if (!dir.isContainer()) {
                 return false;
             }
             return true;
@@ -893,7 +862,7 @@ public class Maven2RepositoryStorage
     }
 
     private static class ArtifactDirectoryFilter
-            implements Predicate<Path> {
+            implements Predicate<StorageAsset> {
         private final Filter<String> filter;
 
         private ArtifactDirectoryFilter(Filter<String> filter) {
@@ -901,8 +870,8 @@ public class Maven2RepositoryStorage
         }
 
         @Override
-        public boolean test(Path dir) {
-            final String name = dir.getFileName().toString();
+        public boolean test(StorageAsset dir) {
+            final String name = dir.getName().toString();
             // TODO compare to logic in maven-repository-layer
             if (!filter.accept(name)) {
                 return false;
@@ -912,7 +881,7 @@ public class Maven2RepositoryStorage
                 return false;
             } else if (Arrays.binarySearch(IGNORED_FILES, name) >= 0) {
                 return false;
-            } else if (Files.isDirectory(dir)) {
+            } else if (dir.isContainer()) {
                 return false;
             }
             // some files from remote repositories can have name like maven-metadata-archiva-vm-all-public.xml
@@ -927,7 +896,7 @@ public class Maven2RepositoryStorage
 
 
     private static final class PomFilenameFilter
-            implements Predicate<Path> {
+            implements Predicate<StorageAsset> {
 
         private final String artifactId, projectVersion;
 
@@ -937,8 +906,8 @@ public class Maven2RepositoryStorage
         }
 
         @Override
-        public boolean test(Path dir) {
-            final String name = dir.getFileName().toString();
+        public boolean test(StorageAsset dir) {
+            final String name = dir.getName();
             if (name.startsWith(artifactId + "-") && name.endsWith(".pom")) {
                 String v = name.substring(artifactId.length() + 1, name.length() - 4);
                 v = VersionUtil.getBaseVersion(v);
@@ -952,7 +921,7 @@ public class Maven2RepositoryStorage
     }
 
     private static class PomFileFilter
-            implements Predicate<Path> {
+            implements Predicate<StorageAsset> {
         private final String pomFile;
 
         private PomFileFilter(String pomFile) {
@@ -960,8 +929,8 @@ public class Maven2RepositoryStorage
         }
 
         @Override
-        public boolean test(Path dir) {
-            return pomFile.equals(dir.getFileName().toString());
+        public boolean test(StorageAsset dir) {
+            return pomFile.equals(dir.getName());
         }
     }
 
index f978d554f3eb03d95b7bc8f8895220e5d4c3a7eb..b03beed9f9e27ff5cb1f6d8202ccd4f5f54e89d2 100644 (file)
@@ -32,6 +32,7 @@ import org.apache.archiva.repository.ManagedRepository;
 import org.apache.archiva.repository.RemoteRepository;
 import org.apache.archiva.repository.RepositoryCredentials;
 import org.apache.archiva.repository.maven2.MavenSystemManager;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.xml.XMLException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.http.auth.UsernamePasswordCredentials;
@@ -78,7 +79,7 @@ public class RepositoryModelResolver
     private RepositorySystemSession session;
     private VersionRangeResolver versionRangeResolver;
 
-    private Path basedir;
+    private StorageAsset basedir;
 
     private RepositoryPathTranslator pathTranslator;
 
@@ -98,7 +99,7 @@ public class RepositoryModelResolver
 
     private ManagedRepository managedRepository;
 
-    public RepositoryModelResolver( Path basedir, RepositoryPathTranslator pathTranslator )
+    public RepositoryModelResolver(StorageAsset basedir, RepositoryPathTranslator pathTranslator )
     {
         this.basedir = basedir;
 
@@ -110,7 +111,7 @@ public class RepositoryModelResolver
                                    Map<String, NetworkProxy> networkProxiesMap, ManagedRepository targetRepository,
                                    MavenSystemManager mavenSystemManager)
     {
-        this( Paths.get( managedRepository.getLocation() ), pathTranslator );
+        this( managedRepository.getAsset(""), pathTranslator );
 
         this.managedRepository = managedRepository;
 
@@ -138,9 +139,9 @@ public class RepositoryModelResolver
         String filename = artifactId + "-" + version + ".pom";
         // TODO: we need to convert 1.0-20091120.112233-1 type paths to baseVersion for the below call - add a test
 
-        Path model = pathTranslator.toFile( basedir, groupId, artifactId, version, filename );
+        StorageAsset model = pathTranslator.toFile( basedir, groupId, artifactId, version, filename );
 
-        if ( !Files.exists(model) )
+        if ( !model.exists() )
         {
             /**
              *
@@ -161,10 +162,10 @@ public class RepositoryModelResolver
                 try
                 {
                     boolean success = getModelFromProxy( remoteRepository, groupId, artifactId, version, filename );
-                    if ( success && Files.exists(model) )
+                    if ( success && model.exists() )
                     {
                         log.info( "Model '{}' successfully retrieved from remote repository '{}'",
-                                  model.toAbsolutePath(), remoteRepository.getId() );
+                                  model.getPath(), remoteRepository.getId() );
                         break;
                     }
                 }
@@ -172,20 +173,20 @@ public class RepositoryModelResolver
                 {
                     log.info(
                         "An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}",
-                        model.toAbsolutePath(), remoteRepository.getId(), e.getMessage() );
+                        model.getPath(), remoteRepository.getId(), e.getMessage() );
                 }
                 catch ( Exception e )
                 {
                     log.warn(
                         "An exception was caught while attempting to retrieve model '{}' from remote repository '{}'.Reason:{}",
-                        model.toAbsolutePath(), remoteRepository.getId(), e.getMessage() );
+                        model.getPath(), remoteRepository.getId(), e.getMessage() );
 
                     continue;
                 }
             }
         }
 
-        return new FileModelSource( model.toFile() );
+        return new FileModelSource( model.getFilePath().toFile() );
     }
 
     public ModelSource resolveModel(Parent parent) throws UnresolvableModelException {
@@ -249,15 +250,15 @@ public class RepositoryModelResolver
                     log.debug( "use snapshot path {} for maven coordinate {}:{}:{}", snapshotPath, groupId, artifactId,
                                version );
 
-                    Path model = basedir.resolve( snapshotPath );
+                    StorageAsset model = basedir.resolve( snapshotPath );
                     //model = pathTranslator.toFile( basedir, groupId, artifactId, lastVersion, filename );
-                    if ( Files.exists(model) )
+                    if ( model.exists() )
                     {
-                        return model;
+                        return model.getFilePath();
                     }
                 }
             }
-            catch ( XMLException e )
+            catch (XMLException | IOException e )
             {
                 log.warn( "fail to read {}, {}", mavenMetadata.toAbsolutePath(), e.getCause() );
             }
index 9c36647d0799b850c194b048c7ef1dac73cc9f06..266a84105f6a44a641f8aa0c0a6eff09b764c107 100644 (file)
@@ -34,7 +34,7 @@ import org.apache.archiva.repository.LayoutException;
 import org.apache.archiva.repository.ManagedRepository;
 import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.RepositoryException;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
 
 import java.io.IOException;
index 026990e876a9785da4eb291eb90a9cc378de4ccc..f8e171dacae7bbf0d13b259da3d5f58390244ee7 100644 (file)
@@ -23,8 +23,7 @@ import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.filelock.FileLockManager;
 import org.apache.archiva.common.utils.PathUtil;
 import org.apache.archiva.repository.*;
-import org.apache.archiva.repository.content.FilesystemStorage;
-import org.apache.archiva.repository.content.RepositoryStorage;
+import org.apache.archiva.repository.storage.FilesystemStorage;
 import org.apache.archiva.repository.content.maven2.MavenRepositoryRequestInfo;
 import org.apache.archiva.repository.features.ArtifactCleanupFeature;
 import org.apache.archiva.repository.features.IndexCreationFeature;
@@ -38,7 +37,6 @@ import java.net.URI;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Locale;
-import java.util.function.Function;
 
 /**
  * Maven2 managed repository implementation.
index f3380c0ec5ab3267b99b7ad783f3870956942951..e89321704f7df17bc85d76a2cdd211446feba672 100644 (file)
@@ -9,7 +9,7 @@ import org.apache.archiva.repository.RepositoryCapabilities;
 import org.apache.archiva.repository.RepositoryType;
 import org.apache.archiva.repository.StandardCapabilities;
 import org.apache.archiva.repository.UnsupportedFeatureException;
-import org.apache.archiva.repository.content.FilesystemStorage;
+import org.apache.archiva.repository.storage.FilesystemStorage;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.RemoteIndexFeature;
 import org.apache.archiva.repository.features.RepositoryFeature;
index 56e65a2bdf63688d378c45c1880a9c6a6b8f881c..60c91ac7136c5ecb094dab98de53364cfaab88f3 100644 (file)
@@ -22,13 +22,12 @@ package org.apache.archiva.repository.maven2;
 import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.filelock.FileLockManager;
 import org.apache.archiva.repository.*;
-import org.apache.archiva.repository.content.FilesystemStorage;
+import org.apache.archiva.repository.storage.FilesystemStorage;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Locale;
 
index bbcb6585db4d4e3d2c8b470b45f2ab20505bdfe5..75b2117a278853ce6fdba0352eb0464c349cd435 100644 (file)
@@ -22,8 +22,7 @@ package org.apache.archiva.repository.maven2;
 import org.apache.archiva.common.filelock.FileLockManager;
 import org.apache.archiva.configuration.*;
 import org.apache.archiva.repository.*;
-import org.apache.archiva.repository.content.FilesystemAsset;
-import org.apache.archiva.repository.content.FilesystemStorage;
+import org.apache.archiva.repository.storage.FilesystemStorage;
 import org.apache.archiva.repository.features.ArtifactCleanupFeature;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.RemoteIndexFeature;
@@ -298,11 +297,19 @@ public class MavenRepositoryProvider implements RepositoryProvider {
             IndexCreationFeature indexCreationFeature = repositoryGroup.getFeature( IndexCreationFeature.class ).get();
             indexCreationFeature.setIndexPath( getURIFromString(configuration.getMergedIndexPath()) );
             Path localPath = Paths.get(configuration.getMergedIndexPath());
-            if (localPath.isAbsolute()) {
-                indexCreationFeature.setLocalIndexPath( new FilesystemAsset(localPath.getFileName().toString(), localPath) );
+            Path repoGroupPath = repositoryGroup.getAsset("").getFilePath().toAbsolutePath();
+            if (localPath.isAbsolute() && !localPath.startsWith(repoGroupPath)) {
+                try {
+                    FilesystemStorage storage = new FilesystemStorage(localPath.getParent(), fileLockManager);
+                    indexCreationFeature.setLocalIndexPath(storage.getAsset(localPath.getFileName().toString()));
+                } catch (IOException e) {
+                    throw new RepositoryException("Could not initialize storage for index path "+localPath);
+                }
+            } else if (localPath.isAbsolute()) {
+                indexCreationFeature.setLocalIndexPath(repositoryGroup.getAsset(repoGroupPath.relativize(localPath).toString()));
             } else
             {
-                indexCreationFeature.setLocalIndexPath( new FilesystemAsset(localPath.toString(), archivaConfiguration.getRepositoryGroupBaseDir( ).resolve( localPath )));
+                indexCreationFeature.setLocalIndexPath(repositoryGroup.getAsset(localPath.toString()));
             }
         }
         // References to other repositories are set filled by the registry
index e2fec5010b02ccf5e1776622f93c2cb94a2530e1..769be5ef9fe2e3eed538e3469e2067f92e125048 100644 (file)
@@ -29,6 +29,7 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
+import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.Arrays;
@@ -46,8 +47,7 @@ public class MavenRepositoryMetadataReaderTest
 
     @Test
     public void testGroupMetadata()
-        throws XMLException
-    {
+            throws XMLException, IOException {
         Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/plugins/maven-metadata.xml" );
 
         ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile );
@@ -81,8 +81,7 @@ public class MavenRepositoryMetadataReaderTest
 
     @Test
     public void testProjectMetadata()
-        throws XMLException
-    {
+            throws XMLException, IOException {
         Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" );
 
         ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile);
@@ -99,8 +98,7 @@ public class MavenRepositoryMetadataReaderTest
 
     @Test
     public void testProjectVersionMetadata()
-        throws XMLException
-    {
+            throws XMLException, IOException {
         Path metadataFile = defaultRepoDir.resolve( "org/apache/apache/5-SNAPSHOT/maven-metadata.xml" );
 
         ArchivaRepositoryMetadata metadata = MavenMetadataReader.read( metadataFile );
index 171563add2651681fd22eb07d189eecf4a4a34a1..53bb59308a33f05d4e6a30cfaf528792e51164c3 100644 (file)
@@ -19,7 +19,7 @@ package org.apache.archiva.repository.index.mock;
  * under the License.
  */
 
-import org.apache.archiva.admin.model.RepositoryAdminException;
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.utils.FileUtils;
 import org.apache.archiva.common.utils.PathUtil;
 import org.apache.archiva.configuration.ArchivaConfiguration;
@@ -40,8 +40,9 @@ import org.apache.archiva.repository.RemoteRepository;
 import org.apache.archiva.repository.Repository;
 import org.apache.archiva.repository.RepositoryType;
 import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
-import org.apache.archiva.repository.content.FilesystemAsset;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.RemoteIndexFeature;
 import org.apache.commons.lang.StringUtils;
@@ -145,7 +146,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
 
     private Path getIndexPath( ArchivaIndexingContext ctx )
     {
-        return PathUtil.getPathFromUri( ctx.getPath( ) );
+        return ctx.getPath( ).getFilePath();
     }
 
     @FunctionalInterface
@@ -380,9 +381,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
     @Override
     public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
     {
-        final URI ctxUri = context.getPath();
+        final StorageAsset ctxUri = context.getPath();
         executeUpdateFunction(context, indexingContext -> {
-            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
             try {
                 indexer.addArtifactsToIndex(artifacts, indexingContext);
             } catch (IOException e) {
@@ -396,9 +397,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
     @Override
     public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
     {
-        final URI ctxUri = context.getPath();
+        final StorageAsset ctxUri = context.getPath();
         executeUpdateFunction(context, indexingContext -> {
-            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
             try {
                 indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
             } catch (IOException e) {
@@ -442,7 +443,12 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
             throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
                     + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
         }
-        MavenIndexContextMock context = new MavenIndexContextMock( repository, mvnCtx );
+        MavenIndexContextMock context = null;
+        try {
+            context = new MavenIndexContextMock( repository, mvnCtx );
+        } catch (IOException e) {
+            throw new IndexCreationFailedException(e);
+        }
 
         return context;
     }
@@ -457,7 +463,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
                 log.warn("Index close failed");
             }
             try {
-                FileUtils.deleteDirectory(Paths.get(context.getPath()));
+                FileUtils.deleteDirectory(context.getPath().getFilePath());
             } catch (IOException e) {
                 throw new IndexUpdateFailedException("Could not delete index files");
             }
@@ -527,12 +533,14 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
     }
 
 
+
     private StorageAsset getIndexPath( Repository repo) throws IOException {
         IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
         Path repoDir = repo.getAsset( "" ).getFilePath();
         URI indexDir = icf.getIndexPath();
         String indexPath = indexDir.getPath();
         Path indexDirectory = null;
+        FilesystemStorage fsStorage = (FilesystemStorage) repo.getAsset("").getStorage();
         if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
         {
 
@@ -541,6 +549,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
             if ( indexDirectory.isAbsolute( ) )
             {
                 indexPath = indexDirectory.getFileName().toString();
+                fsStorage = new FilesystemStorage(indexDirectory.getParent(), new DefaultFileLockManager());
             }
             else
             {
@@ -557,7 +566,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
         {
             Files.createDirectories( indexDirectory );
         }
-        return new FilesystemAsset( indexPath, indexDirectory );
+        return new FilesystemAsset( fsStorage, indexPath, indexDirectory );
     }
 
     private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
index e0db09597d7e45a0756b2b48ab1230ab9d49890f..a6dddae4edd7d949f972df92f34aa6dab020bcbf 100644 (file)
@@ -19,12 +19,14 @@ package org.apache.archiva.repository.index.mock;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.indexer.ArchivaIndexingContext;
 import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.maven.index.context.IndexingContext;
 
 import java.io.IOException;
-import java.net.URI;
 import java.nio.file.Files;
 import java.nio.file.NoSuchFileException;
 import java.sql.Date;
@@ -38,10 +40,12 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
 
     private IndexingContext delegate;
     private Repository repository;
+    private FilesystemStorage indexStorage;
 
-    MavenIndexContextMock(Repository repository, IndexingContext delegate) {
+    MavenIndexContextMock(Repository repository, IndexingContext delegate) throws IOException {
         this.delegate = delegate;
         this.repository = repository;
+        indexStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath(), new DefaultFileLockManager());
 
     }
 
@@ -56,8 +60,8 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
     }
 
     @Override
-    public URI getPath() {
-        return delegate.getIndexDirectoryFile().toURI();
+    public StorageAsset getPath() {
+        return indexStorage.getAsset("");
     }
 
     @Override
index 7c7da49cfe9f2310b193b7cb4b5fa2f0ce9c0e10..a02814042a5f65ed3541c61e17a9fc547995df52 100644 (file)
@@ -27,6 +27,7 @@ import org.apache.archiva.xml.XMLException;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
+import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 
@@ -41,8 +42,7 @@ public class RepositoryMetadataReaderTest
 {
     @Test
     public void testLoadSimple()
-        throws XMLException
-    {
+            throws XMLException, IOException {
         Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" );
         Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/shared/maven-downloader/maven-metadata.xml" );
 
@@ -59,8 +59,7 @@ public class RepositoryMetadataReaderTest
 
     @Test
     public void testLoadComplex()
-        throws XMLException
-    {
+            throws XMLException, IOException {
         Path defaultRepoDir = Paths.get( "src/test/repositories/default-repository" );
         Path metadataFile = defaultRepoDir.resolve( "org/apache/maven/samplejar/maven-metadata.xml" );
 
index a890ca4f691188747fbf2c1c30ed591d3f50ae3a..b4a8e05cefe3b08ce859a9d097b69c4d21da72e3 100644 (file)
@@ -26,7 +26,7 @@ import org.apache.archiva.repository.BasicManagedRepository;
 import org.apache.archiva.repository.ManagedRepository;
 import org.apache.archiva.repository.ReleaseScheme;
 import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.scheduler.indexing.ArtifactIndexingTask;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
index a858c1a844e0439af5c5413d3f8268efc7ac3020..511ce61497c5a9cdcdc76115c03e6f87d644c779 100644 (file)
@@ -1,9 +1,7 @@
 package org.apache.archiva.scheduler.repository.model;
 
 import org.apache.archiva.redback.components.taskqueue.Task;
-import org.apache.archiva.repository.content.StorageAsset;
-
-import java.nio.file.Path;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 
 /*
index 60f0a7b75f7488814ecb8f01794c5e3909182634..90d74247513957ba5ce9123ce65b5c4c98aeed04 100644 (file)
@@ -19,6 +19,7 @@ package org.apache.archiva.mock;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.utils.FileUtils;
 import org.apache.archiva.common.utils.PathUtil;
 import org.apache.archiva.configuration.ArchivaConfiguration;
@@ -27,7 +28,6 @@ import org.apache.archiva.indexer.ArchivaIndexingContext;
 import org.apache.archiva.indexer.IndexCreationFailedException;
 import org.apache.archiva.indexer.IndexUpdateFailedException;
 import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.proxy.ProxyRegistry;
 import org.apache.archiva.proxy.maven.WagonFactory;
 import org.apache.archiva.proxy.maven.WagonFactoryException;
 import org.apache.archiva.proxy.maven.WagonFactoryRequest;
@@ -39,10 +39,12 @@ import org.apache.archiva.repository.RemoteRepository;
 import org.apache.archiva.repository.Repository;
 import org.apache.archiva.repository.RepositoryType;
 import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
-import org.apache.archiva.repository.content.FilesystemAsset;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.maven.index.ArtifactContext;
 import org.apache.maven.index.ArtifactContextProducer;
@@ -140,7 +142,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
 
     private Path getIndexPath( ArchivaIndexingContext ctx )
     {
-        return PathUtil.getPathFromUri( ctx.getPath( ) );
+        return ctx.getPath().getFilePath();
     }
 
     @FunctionalInterface
@@ -365,9 +367,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
     @Override
     public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
     {
-        final URI ctxUri = context.getPath();
+        StorageAsset ctxUri = context.getPath();
         executeUpdateFunction(context, indexingContext -> {
-            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
             try {
                 indexer.addArtifactsToIndex(artifacts, indexingContext);
             } catch (IOException e) {
@@ -381,9 +383,9 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
     @Override
     public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
     {
-        final URI ctxUri = context.getPath();
+        final StorageAsset ctxUri = context.getPath();
         executeUpdateFunction(context, indexingContext -> {
-            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+            Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.getFilePath().toUri().resolve(r)).toFile())).collect(Collectors.toList());
             try {
                 indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
             } catch (IOException e) {
@@ -442,7 +444,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
                 log.warn("Index close failed");
             }
             try {
-                FileUtils.deleteDirectory(Paths.get(context.getPath()));
+                StorageUtil.deleteRecursively(context.getPath());
             } catch (IOException e) {
                 throw new IndexUpdateFailedException("Could not delete index files");
             }
@@ -517,6 +519,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
         URI indexDir = icf.getIndexPath();
         String indexPath = indexDir.getPath();
         Path indexDirectory = null;
+        FilesystemStorage filesystemStorage = (FilesystemStorage) repo.getAsset("").getStorage();
         if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
         {
 
@@ -525,6 +528,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
             if ( indexDirectory.isAbsolute( ) )
             {
                 indexPath = indexDirectory.getFileName().toString();
+                filesystemStorage = new FilesystemStorage(indexDirectory, new DefaultFileLockManager());
             }
             else
             {
@@ -541,7 +545,7 @@ public class ArchivaIndexManagerMock implements ArchivaIndexManager {
         {
             Files.createDirectories( indexDirectory );
         }
-        return new FilesystemAsset( indexPath, indexDirectory);
+        return new FilesystemAsset( filesystemStorage, indexPath, indexDirectory);
     }
 
     private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
index b3ae99c7b103aca9287c6161ad62ca14700819f4..fa070a7d9f12158aa6f6d2862ca2cd7369f9d38d 100644 (file)
@@ -19,8 +19,12 @@ package org.apache.archiva.mock;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.indexer.ArchivaIndexingContext;
 import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.maven.index.context.IndexingContext;
 
 import java.io.IOException;
@@ -38,10 +42,16 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
 
     private IndexingContext delegate;
     private Repository repository;
+    private FilesystemStorage filesystemStorage;
 
     MavenIndexContextMock( Repository repository, IndexingContext delegate) {
         this.delegate = delegate;
         this.repository = repository;
+        try {
+            filesystemStorage = new FilesystemStorage(delegate.getIndexDirectoryFile().toPath().getParent(), new DefaultFileLockManager());
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
 
     }
 
@@ -56,8 +66,9 @@ public class MavenIndexContextMock implements ArchivaIndexingContext {
     }
 
     @Override
-    public URI getPath() {
-        return delegate.getIndexDirectoryFile().toURI();
+    public StorageAsset getPath() {
+        return new FilesystemAsset(filesystemStorage, delegate.getIndexDirectoryFile().toPath().getFileName().toString(), delegate.getIndexDirectoryFile().toPath());
+
     }
 
     @Override
index 1067fc24ae2efa25d16958430f85846e5aa1548c..403377b42bb8fdbd8d7fc4155a6d50c73be7eb1a 100644 (file)
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-security</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-storage-api</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-repository-admin-api</artifactId>
index 9488261b45e525c03579983d96b289dae9065efb..56117836d57dcafa07eeb3a5f9f19196153f5578 100644 (file)
@@ -43,6 +43,8 @@ import org.apache.archiva.repository.ReleaseScheme;
 import org.apache.archiva.repository.RepositoryException;
 import org.apache.archiva.repository.RepositoryNotFoundException;
 import org.apache.archiva.repository.metadata.MetadataTools;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.archiva.rest.api.model.*;
 import org.apache.archiva.rest.api.services.ArchivaRestServiceException;
 import org.apache.archiva.rest.api.services.BrowseService;
@@ -62,6 +64,8 @@ import java.io.InputStream;
 import java.nio.charset.Charset;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.nio.file.StandardOpenOption;
 import java.util.*;
 import java.util.jar.JarEntry;
 import java.util.jar.JarFile;
@@ -696,8 +700,8 @@ public class DefaultBrowseService
                 ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier,
                                                                        StringUtils.isEmpty( type ) ? "jar" : type,
                                                                        repoId );
-                Path file = managedRepositoryContent.toFile( archivaArtifact );
-                if ( Files.exists(file) )
+                StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );
+                if ( file.exists() )
                 {
                     return readFileEntries( file, path, repoId );
                 }
@@ -781,8 +785,8 @@ public class DefaultBrowseService
                 ArchivaArtifact archivaArtifact = new ArchivaArtifact( groupId, artifactId, version, classifier,
                                                                        StringUtils.isEmpty( type ) ? "jar" : type,
                                                                        repoId );
-                Path file = managedRepositoryContent.toFile( archivaArtifact );
-                if ( !Files.exists(file) )
+                StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );
+                if ( !file.exists() )
                 {
                     log.debug( "file: {} not exists for repository: {} try next repository", file, repoId );
                     continue;
@@ -790,7 +794,8 @@ public class DefaultBrowseService
                 if ( StringUtils.isNotBlank( path ) )
                 {
                     // zip entry of the path -> path must a real file entry of the archive
-                    JarFile jarFile = new JarFile( file.toFile() );
+                    StorageUtil.PathInformation pathInfo = StorageUtil.getAssetDataAsPath(file);
+                    JarFile jarFile = new JarFile( pathInfo.getPath().toFile());
                     ZipEntry zipEntry = jarFile.getEntry( path );
                     try (InputStream inputStream = jarFile.getInputStream( zipEntry ))
                     {
@@ -799,9 +804,14 @@ public class DefaultBrowseService
                     finally
                     {
                         closeQuietly( jarFile );
+                        if (pathInfo.isTmpFile()) {
+                            Files.deleteIfExists(pathInfo.getPath());
+                        }
                     }
                 }
-                return new ArtifactContent( new String(Files.readAllBytes( file ), ARTIFACT_CONTENT_ENCODING), repoId );
+                try(InputStream readStream = file.getReadStream()) {
+                    return new ArtifactContent(IOUtils.toString(readStream, ARTIFACT_CONTENT_ENCODING), repoId);
+                }
             }
         }
         catch ( IOException e )
@@ -846,9 +856,9 @@ public class DefaultBrowseService
                                                                        StringUtils.isEmpty( classifier )
                                                                            ? ""
                                                                            : classifier, "jar", repoId );
-                Path file = managedRepositoryContent.toFile( archivaArtifact );
+                StorageAsset file = managedRepositoryContent.toFile( archivaArtifact );
 
-                if ( file != null && Files.exists(file) )
+                if ( file != null && file.exists() )
                 {
                     return true;
                 }
@@ -856,8 +866,8 @@ public class DefaultBrowseService
                 // in case of SNAPSHOT we can have timestamped version locally !
                 if ( StringUtils.endsWith( version, VersionUtil.SNAPSHOT ) )
                 {
-                    Path metadataFile = file.getParent().resolve(MetadataTools.MAVEN_METADATA );
-                    if ( Files.exists(metadataFile) )
+                    StorageAsset metadataFile = file.getStorage().getAsset(file.getParent().getPath()+"/"+MetadataTools.MAVEN_METADATA );
+                    if ( metadataFile.exists() )
                     {
                         try
                         {
@@ -873,14 +883,14 @@ public class DefaultBrowseService
                                 .append( ( StringUtils.isEmpty( classifier ) ? "" : "-" + classifier ) ) //
                                 .append( ".jar" ).toString();
 
-                            Path timeStampFile = file.getParent().resolve( timeStampFileName );
-                            log.debug( "try to find timestamped snapshot version file: {}", timeStampFile.toAbsolutePath() );
-                            if ( Files.exists(timeStampFile) )
+                            StorageAsset timeStampFile = file.getStorage().getAsset(file.getParent().getPath() + "/" + timeStampFileName );
+                            log.debug( "try to find timestamped snapshot version file: {}", timeStampFile.getPath() );
+                            if ( timeStampFile.exists() )
                             {
                                 return true;
                             }
                         }
-                        catch ( XMLException e )
+                        catch (XMLException | IOException e )
                         {
                             log.warn( "skip fail to find timestamped snapshot file: {}", e.getMessage() );
                         }
@@ -891,7 +901,7 @@ public class DefaultBrowseService
 
                 file = proxyHandler.fetchFromProxies( managedRepositoryContent, path );
 
-                if ( file != null && Files.exists(file) )
+                if ( file != null && file.exists() )
                 {
                     // download pom now
                     String pomPath = StringUtils.substringBeforeLast( path, ".jar" ) + ".pom";
@@ -1075,7 +1085,7 @@ public class DefaultBrowseService
         }
     }
 
-    protected List<ArtifactContentEntry> readFileEntries(final Path file, final String filterPath, final String repoId )
+    protected List<ArtifactContentEntry> readFileEntries(final StorageAsset file, final String filterPath, final String repoId )
         throws IOException
     {
         String cleanedfilterPath = filterPath==null ? "" : (StringUtils.startsWith(filterPath, "/") ?
@@ -1085,7 +1095,9 @@ public class DefaultBrowseService
         if (!StringUtils.endsWith(cleanedfilterPath,"/") && !StringUtils.isEmpty(cleanedfilterPath)) {
             filterDepth++;
         }
-        JarFile jarFile = new JarFile( file.toFile() );
+
+        StorageUtil.PathInformation pathInfo = StorageUtil.getAssetDataAsPath(file);
+        JarFile jarFile = new JarFile(pathInfo.getPath().toFile());
         try
         {
             Enumeration<JarEntry> jarEntryEnumeration = jarFile.entries();
@@ -1141,6 +1153,9 @@ public class DefaultBrowseService
             {
                 jarFile.close();
             }
+            if (pathInfo.isTmpFile()) {
+                Files.deleteIfExists(pathInfo.getPath());
+            }
         }
         List<ArtifactContentEntry> sorted = new ArrayList<>( artifactContentEntryMap.values() );
         Collections.sort( sorted, ArtifactContentEntryComparator.INSTANCE );
index f3dd25569c1dc1df219ef65fa541023c6cb0c63c..b2d23dde8dabf21824df2b5432c08aa5cdd6daee 100644 (file)
@@ -54,9 +54,9 @@ import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.RepositoryException;
 import org.apache.archiva.repository.RepositoryNotFoundException;
 import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.content.RepositoryStorage;
-import org.apache.archiva.repository.content.StorageAsset;
-import org.apache.archiva.repository.content.StorageUtil;
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.archiva.repository.events.RepositoryListener;
 import org.apache.archiva.repository.metadata.MetadataTools;
 import org.apache.archiva.repository.metadata.RepositoryMetadataException;
@@ -89,11 +89,10 @@ import javax.inject.Inject;
 import javax.inject.Named;
 import javax.ws.rs.core.Response;
 import java.io.IOException;
-import java.nio.file.FileSystems;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -522,7 +521,7 @@ public class DefaultRepositoriesService
             {
                 metadata = MavenMetadataReader.read( metadataFile.getFilePath() );
             }
-            catch ( XMLException e )
+            catch (XMLException | IOException e )
             {
                 throw new RepositoryMetadataException( e.getMessage(), e );
             }
@@ -543,7 +542,7 @@ public class DefaultRepositoriesService
         throws IOException
     {
 
-        StorageUtil.copyAsset( sourceStorage, sourceFile, targetStorage, targetPath, true );
+        StorageUtil.copyAsset( sourceFile, targetPath, true );
         if ( fixChecksums )
         {
             fixChecksums( targetPath );
@@ -612,7 +611,11 @@ public class DefaultRepositoriesService
             projectMetadata.setReleasedVersion( latestVersion );
         }
 
-        RepositoryMetadataWriter.write( projectMetadata, projectMetadataFile.getFilePath());
+        try(OutputStreamWriter writer = new OutputStreamWriter(projectMetadataFile.getWriteStream(true))) {
+            RepositoryMetadataWriter.write(projectMetadata, writer);
+        } catch (IOException e) {
+            throw new RepositoryMetadataException(e);
+        }
 
         if ( fixChecksums )
         {
@@ -1177,7 +1180,11 @@ public class DefaultRepositoriesService
         metadata.setLastUpdatedTimestamp( lastUpdatedTimestamp );
         metadata.setAvailableVersions( availableVersions );
 
-        RepositoryMetadataWriter.write( metadata, metadataFile.getFilePath());
+        try (OutputStreamWriter writer = new OutputStreamWriter(metadataFile.getWriteStream(true))) {
+            RepositoryMetadataWriter.write(metadata, writer);
+        } catch (IOException e) {
+            throw new RepositoryMetadataException(e);
+        }
         ChecksummedFile checksum = new ChecksummedFile( metadataFile.getFilePath() );
         checksum.fixChecksums( algorithms );
     }
index 7544dd35545ef5b3b9b93aebd03de40471d0379c..2c6db17bf81d1e620ebfd0fdf3ecd496204d8e21 100644 (file)
@@ -23,6 +23,8 @@ import org.apache.archiva.metadata.model.ArtifactMetadata;
 import org.apache.archiva.metadata.model.maven2.MavenArtifactFacet;
 import org.apache.archiva.model.ArtifactReference;
 import org.apache.archiva.repository.ManagedRepositoryContent;
+import org.apache.archiva.repository.storage.StorageAsset;
+import org.apache.archiva.repository.storage.StorageUtil;
 import org.apache.commons.io.FilenameUtils;
 
 import java.nio.file.Path;
@@ -79,7 +81,7 @@ public class ArtifactBuilder
 
         ref.setClassifier( classifier );
         ref.setType( type );
-        Path file = managedRepositoryContent.toFile( ref );
+        StorageAsset file = managedRepositoryContent.toFile( ref );
 
         String extension = getExtensionFromFile(file);
         
@@ -124,10 +126,10 @@ public class ArtifactBuilder
     /**
      * Extract file extension
      */
-    String getExtensionFromFile( Path file )
+    String getExtensionFromFile( StorageAsset file )
     {
         // we are just interested in the section after the last -
-        String[] parts = file.getFileName().toString().split( "-" );
+        String[] parts = file.getName().split( "-" );
         if ( parts.length > 0 )
         {
             // get anything after a dot followed by a letter a-z, including other dots
@@ -139,7 +141,7 @@ public class ArtifactBuilder
             }
         }
         // just in case
-        return FilenameUtils.getExtension( file.toFile().getName() );
+        return StorageUtil.getExtension( file );
     }
 
 }
index 1472de5d9269371da4eb334b26cb3001974d2fcc..4cd630e3c96a91f29161d4d5912caceaa0c8ac2e 100644 (file)
@@ -19,6 +19,9 @@ package org.apache.archiva.rest.services;
  */
 
 import junit.framework.TestCase;
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
 import org.apache.archiva.rest.api.model.ArtifactContentEntry;
 import org.apache.archiva.test.utils.ArchivaBlockJUnit4ClassRunner;
 import org.junit.Test;
@@ -56,10 +59,11 @@ public class ArtifactContentEntriesTests
         throws Exception
     {
 
+        FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
         Path file = Paths.get( getBasedir(),
                               "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
 
-        List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( file, null, "foo" );
+        List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), null, "foo" );
 
         log.info( "artifactContentEntries: {}", artifactContentEntries );
 
@@ -74,10 +78,12 @@ public class ArtifactContentEntriesTests
         throws Exception
     {
 
+        FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
         Path file = Paths.get( getBasedir(),
                               "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
 
-        List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( file, "", "foo" );
+        List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries(
+                new FilesystemAsset(filesystemStorage, file.toString(), file), "", "foo" );
 
         log.info( "artifactContentEntries: {}", artifactContentEntries );
 
@@ -92,10 +98,12 @@ public class ArtifactContentEntriesTests
         throws Exception
     {
 
+        FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
+
         Path file = Paths.get( getBasedir(),
                               "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
 
-        List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( file, "/", "foo" );
+        List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(),file), "/", "foo" );
 
         log.info( "artifactContentEntries: {}", artifactContentEntries );
 
@@ -110,10 +118,12 @@ public class ArtifactContentEntriesTests
         throws Exception
     {
 
+        FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
+
         Path file = Paths.get( getBasedir(),
                               "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
 
-        List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( file, "org", "foo" );
+        List<ArtifactContentEntry> artifactContentEntries = browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org", "foo" );
 
         log.info( "artifactContentEntries: {}", artifactContentEntries );
 
@@ -127,11 +137,13 @@ public class ArtifactContentEntriesTests
         throws Exception
     {
 
+        FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
+
         Path file = Paths.get( getBasedir(),
                               "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
 
         List<ArtifactContentEntry> artifactContentEntries =
-            browseService.readFileEntries( file, "org/apache/commons/logging/impl/", "foo" );
+            browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org/apache/commons/logging/impl/", "foo" );
 
         log.info( "artifactContentEntries: {}", artifactContentEntries );
 
@@ -145,11 +157,13 @@ public class ArtifactContentEntriesTests
         throws Exception
     {
 
+        FilesystemStorage filesystemStorage = new FilesystemStorage(Paths.get(getBasedir()), new DefaultFileLockManager());
+
         Path file = Paths.get( getBasedir(),
                               "src/test/repo-with-osgi/commons-logging/commons-logging/1.1/commons-logging-1.1.jar" );
 
         List<ArtifactContentEntry> artifactContentEntries =
-            browseService.readFileEntries( file, "org/apache/commons/logging/", "foo" );
+            browseService.readFileEntries( new FilesystemAsset(filesystemStorage, file.toString(), file), "org/apache/commons/logging/", "foo" );
 
         log.info( "artifactContentEntries: {}", artifactContentEntries );
 
index f0e825b793505cb7c6b05979e14f9f55b1fcfbf2..f95099252a6faef29cc6ff0e2e2c97c8860c2957 100644 (file)
@@ -18,9 +18,15 @@ package org.apache.archiva.rest.services.utils;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.easymock.TestSubject;
 import org.junit.Test;
 
+import java.io.IOException;
+import java.nio.file.Path;
 import java.nio.file.Paths;
 
 import static org.assertj.core.api.Assertions.assertThat;
@@ -30,39 +36,39 @@ public class ArtifactBuilderTest
     @TestSubject
     private ArtifactBuilder builder = new ArtifactBuilder();
 
+    StorageAsset getFile(String path) throws IOException {
+        Path filePath = Paths.get(path);
+        FilesystemStorage filesystemStorage = new FilesystemStorage(filePath.getParent(), new DefaultFileLockManager());
+        return new FilesystemAsset(filesystemStorage, filePath.getFileName().toString(), filePath);
+    }
+
     @Test
-    public void testBuildSnapshot()
-    {
-        assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-2.3-20141119.064321-40.jar" ) ) ).isEqualTo( "jar" );
+    public void testBuildSnapshot() throws IOException {
+        assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-2.3-20141119.064321-40.jar" ) ) ).isEqualTo( "jar" );
     }
 
     @Test
-    public void testBuildPom()
-    {
-        assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0.pom" ) ) ).isEqualTo( "pom" );
+    public void testBuildPom() throws IOException {
+        assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0.pom" ) ) ).isEqualTo( "pom" );
     }
 
     @Test
-    public void testBuildJar()
-    {
-        assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0-sources.jar" ) ) ).isEqualTo( "jar" );
+    public void testBuildJar() throws IOException {
+        assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0-sources.jar" ) ) ).isEqualTo( "jar" );
     }
 
     @Test
-    public void testBuildTarGz()
-    {
-        assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0.tar.gz" ) ) ).isEqualTo( "tar.gz" );
+    public void testBuildTarGz() throws IOException {
+        assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0.tar.gz" ) ) ).isEqualTo( "tar.gz" );
     }
 
     @Test
-    public void testBuildPomZip()
-    {
-        assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0.pom.zip" ) ) ).isEqualTo( "pom.zip" );
+    public void testBuildPomZip() throws IOException {
+        assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0.pom.zip" ) ) ).isEqualTo( "pom.zip" );
     }
 
     @Test
-    public void testBuildR00()
-    {
-        assertThat( builder.getExtensionFromFile( Paths.get( "/tmp/foo-1.0.r00" ) ) ).isEqualTo( "r00" );
+    public void testBuildR00() throws IOException {
+        assertThat( builder.getExtensionFromFile( getFile( "/tmp/foo-1.0.r00" ) ) ).isEqualTo( "r00" );
     }
 }
index 5d61568e869563739bd377d9f3b1b41550c64dc6..929921aeed52ae0a624cfa3506d632fd3b82307f 100644 (file)
@@ -24,12 +24,7 @@ import org.apache.archiva.metadata.model.ProjectVersionMetadata;
 import org.apache.archiva.metadata.repository.MetadataRepository;
 import org.apache.archiva.metadata.repository.RepositorySession;
 import org.apache.archiva.filter.Filter;
-import org.apache.archiva.metadata.repository.storage.ReadMetadataRequest;
-import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
-import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataException;
-import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
-import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
-import org.apache.archiva.metadata.repository.storage.RepositoryStorageRuntimeException;
+import org.apache.archiva.metadata.repository.storage.*;
 import org.apache.archiva.model.ArtifactReference;
 import org.apache.archiva.policies.ProxyDownloadException;
 import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
@@ -37,7 +32,9 @@ import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.events.RepositoryListener;
 import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler;
 import org.apache.archiva.scheduler.repository.model.RepositoryTask;
+import org.apache.archiva.xml.XMLException;
 
+import java.io.IOException;
 import java.util.Collection;
 
 /**
@@ -166,7 +163,7 @@ public class MockBeanServices
     }
 
     @Override
-    public String getFilePathWithVersion( String requestPath, ManagedRepositoryContent managedRepositoryContent )
+    public String getFilePathWithVersion( String requestPath, ManagedRepositoryContent managedRepositoryContent ) throws RelocationException, XMLException, IOException
     {
         return null;
     }
index afe40d285e7a8077124748645e7548af42ab6bc6..33ca265123be25d985ed30d4324890c2d293e737 100644 (file)
@@ -42,6 +42,7 @@ import org.apache.archiva.repository.content.ArtifactUtil;
 import org.apache.archiva.repository.metadata.MetadataTools;
 import org.apache.archiva.repository.metadata.RepositoryMetadataException;
 import org.apache.archiva.repository.metadata.RepositoryMetadataWriter;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.rest.api.services.ArchivaRestServiceException;
 import org.apache.archiva.rest.services.AbstractRestService;
 import org.apache.archiva.scheduler.ArchivaTaskScheduler;
@@ -68,9 +69,7 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpSession;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.Response;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
+import java.io.*;
 import java.net.URLDecoder;
 import java.nio.file.*;
 import java.text.DateFormat;
@@ -368,10 +367,10 @@ public class DefaultFileUploadService
             ArtifactReference artifactReference = createArtifactRef(fileMetadata, groupId, artifactId, version);
             artifactReference.setType(packaging);
 
-            Path pomPath = artifactUtil.getArtifactPath(repoConfig, artifactReference);
-            Path targetPath = pomPath.getParent();
+            StorageAsset pomPath = artifactUtil.getArtifactAsset(repoConfig, artifactReference);
+            StorageAsset targetPath = pomPath.getParent();
 
-            String pomFilename = pomPath.getFileName().toString();
+            String pomFilename = pomPath.getName();
             if (StringUtils.isNotEmpty(fileMetadata.getClassifier())) {
                 pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier());
             }
@@ -408,8 +407,8 @@ public class DefaultFileUploadService
             artifactReference.setType(
                     StringUtils.isEmpty(fileMetadata.getPackaging()) ? packaging : fileMetadata.getPackaging());
 
-            Path artifactPath = artifactUtil.getArtifactPath(repoConfig, artifactReference);
-            Path targetPath = artifactPath.getParent();
+            StorageAsset artifactPath = artifactUtil.getArtifactAsset(repoConfig, artifactReference);
+            StorageAsset targetPath = artifactPath.getParent();
 
             log.debug("artifactPath: {} found targetPath: {}", artifactPath, targetPath);
 
@@ -417,7 +416,7 @@ public class DefaultFileUploadService
             int newBuildNumber = -1;
             String timestamp = null;
 
-            Path versionMetadataFile = targetPath.resolve(MetadataTools.MAVEN_METADATA);
+            StorageAsset versionMetadataFile = targetPath.resolve(MetadataTools.MAVEN_METADATA);
             ArchivaRepositoryMetadata versionMetadata = getMetadata(versionMetadataFile);
 
             if (VersionUtil.isSnapshot(version)) {
@@ -432,11 +431,11 @@ public class DefaultFileUploadService
                 }
             }
 
-            if (!Files.exists(targetPath)) {
-                Files.createDirectories(targetPath);
+            if (!targetPath.exists()) {
+                targetPath.create();
             }
 
-            String filename = artifactPath.getFileName().toString();
+            String filename = artifactPath.getName().toString();
             if (VersionUtil.isSnapshot(version)) {
                 filename = filename.replaceAll(VersionUtil.SNAPSHOT, timestamp + "-" + newBuildNumber);
             }
@@ -446,8 +445,8 @@ public class DefaultFileUploadService
             // !(archivaAdministration.getKnownContentConsumers().contains("create-missing-checksums"));
 
             try {
-                Path targetFile = targetPath.resolve(filename);
-                if (Files.exists(targetFile) && !VersionUtil.isSnapshot(version) && repoConfig.blocksRedeployments()) {
+                StorageAsset targetFile = targetPath.resolve(filename);
+                if (targetFile.exists() && !VersionUtil.isSnapshot(version) && repoConfig.blocksRedeployments()) {
                     throw new ArchivaRestServiceException(
                             "Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.",
                             Response.Status.BAD_REQUEST.getStatusCode(), null);
@@ -471,7 +470,7 @@ public class DefaultFileUploadService
                 pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";
 
                 try {
-                    Path generatedPomFile =
+                    StorageAsset generatedPomFile =
                             createPom(targetPath, pomFilename, fileMetadata, groupId, artifactId, version, packaging);
                     triggerAuditEvent(repoConfig.getId(), targetPath.resolve(pomFilename).toString(), AuditEvent.UPLOAD_FILE);
                     if (fixChecksums) {
@@ -487,7 +486,7 @@ public class DefaultFileUploadService
 
             // explicitly update only if metadata-updater consumer is not enabled!
             if (!archivaAdministration.getKnownContentConsumers().contains("metadata-updater")) {
-                updateProjectMetadata(targetPath.toAbsolutePath().toString(), lastUpdatedTimestamp, timestamp, newBuildNumber,
+                updateProjectMetadata(targetPath, lastUpdatedTimestamp, timestamp, newBuildNumber,
                         fixChecksums, fileMetadata, groupId, artifactId, version, packaging);
 
                 if (VersionUtil.isSnapshot(version)) {
@@ -525,20 +524,20 @@ public class DefaultFileUploadService
         return artifactReference;
     }
 
-    private ArchivaRepositoryMetadata getMetadata(Path metadataFile)
+    private ArchivaRepositoryMetadata getMetadata(StorageAsset metadataFile)
             throws RepositoryMetadataException {
         ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
-        if (Files.exists(metadataFile)) {
+        if (metadataFile.exists()) {
             try {
                 metadata = MavenMetadataReader.read(metadataFile);
-            } catch (XMLException e) {
+            } catch (XMLException | IOException e) {
                 throw new RepositoryMetadataException(e.getMessage(), e);
             }
         }
         return metadata;
     }
 
-    private Path createPom(Path targetPath, String filename, FileMetadata fileMetadata, String groupId,
+    private StorageAsset createPom(StorageAsset targetPath, String filename, FileMetadata fileMetadata, String groupId,
                            String artifactId, String version, String packaging)
             throws IOException {
         Model projectModel = new Model();
@@ -548,22 +547,22 @@ public class DefaultFileUploadService
         projectModel.setVersion(version);
         projectModel.setPackaging(packaging);
 
-        Path pomFile = targetPath.resolve(filename);
+        StorageAsset pomFile = targetPath.resolve(filename);
         MavenXpp3Writer writer = new MavenXpp3Writer();
 
-        try (FileWriter w = new FileWriter(pomFile.toFile())) {
+        try (Writer w = new OutputStreamWriter(pomFile.getWriteStream(true))) {
             writer.write(w, projectModel);
         }
 
         return pomFile;
     }
 
-    private void fixChecksums(Path file) {
-        ChecksummedFile checksum = new ChecksummedFile(file);
+    private void fixChecksums(StorageAsset file) {
+        ChecksummedFile checksum = new ChecksummedFile(file.getFilePath());
         checksum.fixChecksums(algorithms);
     }
 
-    private void queueRepositoryTask(String repositoryId, Path localFile) {
+    private void queueRepositoryTask(String repositoryId, StorageAsset localFile) {
         RepositoryTask task = new RepositoryTask();
         task.setRepositoryId(repositoryId);
         task.setResourceFile(localFile);
@@ -574,15 +573,14 @@ public class DefaultFileUploadService
             scheduler.queueTask(task);
         } catch (TaskQueueException e) {
             log.error("Unable to queue repository task to execute consumers on resource file ['{}"
-                    + "'].", localFile.getFileName());
+                    + "'].", localFile.getName());
         }
     }
 
-    private void copyFile(Path sourceFile, Path targetPath, String targetFilename, boolean fixChecksums)
+    private void copyFile(Path sourceFile, StorageAsset targetPath, String targetFilename, boolean fixChecksums)
             throws IOException {
 
-        Files.copy(sourceFile, targetPath.resolve(targetFilename), StandardCopyOption.REPLACE_EXISTING,
-                StandardCopyOption.COPY_ATTRIBUTES);
+        targetPath.resolve(targetFilename).replaceDataFromFile(sourceFile);
 
         if (fixChecksums) {
             fixChecksums(targetPath.resolve(targetFilename));
@@ -592,19 +590,19 @@ public class DefaultFileUploadService
     /**
      * Update artifact level metadata. If it does not exist, create the metadata and fix checksums if necessary.
      */
-    private void updateProjectMetadata(String targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber,
+    private void updateProjectMetadata(StorageAsset targetPath, Date lastUpdatedTimestamp, String timestamp, int buildNumber,
                                        boolean fixChecksums, FileMetadata fileMetadata, String groupId,
                                        String artifactId, String version, String packaging)
             throws RepositoryMetadataException {
         List<String> availableVersions = new ArrayList<>();
         String latestVersion = version;
 
-        Path projectDir = Paths.get(targetPath).getParent();
-        Path projectMetadataFile = projectDir.resolve(MetadataTools.MAVEN_METADATA);
+        StorageAsset projectDir = targetPath.getParent();
+        StorageAsset projectMetadataFile = projectDir.resolve(MetadataTools.MAVEN_METADATA);
 
         ArchivaRepositoryMetadata projectMetadata = getMetadata(projectMetadataFile);
 
-        if (Files.exists(projectMetadataFile)) {
+        if (projectMetadataFile.exists()) {
             availableVersions = projectMetadata.getAvailableVersions();
 
             Collections.sort(availableVersions, VersionComparator.getInstance());
@@ -648,12 +646,12 @@ public class DefaultFileUploadService
      * Update version level metadata for snapshot artifacts. If it does not exist, create the metadata and fix checksums
      * if necessary.
      */
-    private void updateVersionMetadata(ArchivaRepositoryMetadata metadata, Path metadataFile,
+    private void updateVersionMetadata(ArchivaRepositoryMetadata metadata, StorageAsset metadataFile,
                                        Date lastUpdatedTimestamp, String timestamp, int buildNumber,
                                        boolean fixChecksums, FileMetadata fileMetadata, String groupId,
                                        String artifactId, String version, String packaging)
             throws RepositoryMetadataException {
-        if (!Files.exists(metadataFile)) {
+        if (!metadataFile.exists()) {
             metadata.setGroupId(groupId);
             metadata.setArtifactId(artifactId);
             metadata.setVersion(version);
index 73dcd4a7f33bf63fc0f28e9a7f4d21722eed64ed..79a8e9def883422c0308022b3af205e03e5f4de1 100644 (file)
       <scope>runtime</scope>
       -->
     </dependency>
+    <dependency>
+      <groupId>org.apache.archiva</groupId>
+      <artifactId>archiva-storage-api</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.archiva.maven</groupId>
       <artifactId>archiva-maven-repository</artifactId>
index b3843e513f8dcaeb3419cca184685608c162272c..26ef918810ddf3ee38e9f66b69d5ef71d5bc8bd4 100644 (file)
@@ -22,8 +22,8 @@ package org.apache.archiva.webdav;
 import edu.emory.mathcs.backport.java.util.Collections;
 import org.apache.archiva.metadata.model.facets.AuditEvent;
 import org.apache.archiva.repository.LayoutException;
-import org.apache.archiva.repository.content.RepositoryStorage;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.RepositoryStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.repository.events.AuditListener;
 import org.apache.archiva.scheduler.ArchivaTaskScheduler;
 import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler;
index 30e14433dc114dcccf6de7e806323722a6bc6d53..71868d10c3c979a0c6c80472bb9fb02dcb39153a 100644 (file)
@@ -25,6 +25,7 @@ import org.apache.archiva.audit.Auditable;
 import org.apache.archiva.checksum.ChecksumAlgorithm;
 import org.apache.archiva.checksum.ChecksumUtil;
 import org.apache.archiva.checksum.StreamingChecksum;
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.filelock.FileLockManager;
 import org.apache.archiva.common.plexusbridge.PlexusSisuBridgeException;
 import org.apache.archiva.common.utils.PathUtil;
@@ -65,8 +66,8 @@ import org.apache.archiva.repository.ReleaseScheme;
 import org.apache.archiva.repository.RepositoryGroup;
 import org.apache.archiva.repository.RepositoryRegistry;
 import org.apache.archiva.repository.RepositoryRequestInfo;
-import org.apache.archiva.repository.content.FilesystemAsset;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.repository.events.AuditListener;
 import org.apache.archiva.repository.features.IndexCreationFeature;
 import org.apache.archiva.repository.metadata.MetadataTools;
@@ -343,7 +344,7 @@ public class ArchivaDavResourceFactory
                                 ArchivaRepositoryMetadata repoMetadata = MavenMetadataReader.read( metadataFile );
                                 mergedMetadata = RepositoryMetadataMerge.merge( mergedMetadata, repoMetadata );
                             }
-                            catch ( XMLException e )
+                            catch (XMLException | IOException e )
                             {
                                 throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                                         "Error occurred while reading metadata file." );
@@ -427,11 +428,10 @@ public class ArchivaDavResourceFactory
         {
             // we are in the case of index file request
             String requestedFileName = StringUtils.substringAfterLast( pathInfo, "/" );
-            Path temporaryIndexDirectory =
+            StorageAsset temporaryIndexDirectory =
                 buildMergedIndexDirectory( activePrincipal, request, repoGroup );
-            FilesystemAsset asset = new FilesystemAsset( pathInfo, temporaryIndexDirectory.resolve(requestedFileName) );
+            StorageAsset asset = temporaryIndexDirectory.getStorage().getAsset(requestedFileName);
 
-            Path resourceFile = temporaryIndexDirectory.resolve( requestedFileName );
             try {
                 resource = new ArchivaDavResource( asset, requestedFileName, repoGroup,
                                                    request.getRemoteAddr(), activePrincipal, request.getDavSession(),
@@ -543,7 +543,7 @@ public class ArchivaDavResourceFactory
 
             throw new BrowserRedirectException( addHrefPrefix( contextPath, path ), e.getRelocationType() );
         }
-        catch ( XMLException e )
+        catch (XMLException | IOException e )
         {
             log.error( e.getMessage(), e );
             throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e );
@@ -765,7 +765,7 @@ public class ArchivaDavResourceFactory
         RepositoryProxyHandler proxyHandler = proxyRegistry.getHandler(managedRepository.getRepository().getType()).get(0);
         if ( repositoryRequestInfo.isSupportFile( path ) )
         {
-            Path proxiedFile = proxyHandler.fetchFromProxies( managedRepository, path );
+            StorageAsset proxiedFile = proxyHandler.fetchFromProxies( managedRepository, path );
 
             return ( proxiedFile != null );
         }
@@ -780,7 +780,7 @@ public class ArchivaDavResourceFactory
         if ( repositoryRequestInfo.isArchetypeCatalog( path ) )
         {
             // FIXME we must implement a merge of remote archetype catalog from remote servers.
-            Path proxiedFile = proxyHandler.fetchFromProxies( managedRepository, path );
+            StorageAsset proxiedFile = proxyHandler.fetchFromProxies( managedRepository, path );
 
             return ( proxiedFile != null );
         }
@@ -799,7 +799,7 @@ public class ArchivaDavResourceFactory
                     this.applicationContext.getBean( "repositoryStorage#" + repositoryLayout, RepositoryStorage.class );
                 repositoryStorage.applyServerSideRelocation( managedRepository, artifact );
 
-                Path proxiedFile = proxyHandler.fetchFromProxies( managedRepository, artifact );
+                StorageAsset proxiedFile = proxyHandler.fetchFromProxies( managedRepository, artifact );
 
                 resource.setPath( managedRepository.toPath( artifact ) );
 
@@ -1058,10 +1058,9 @@ public class ArchivaDavResourceFactory
 
             if ( StringUtils.endsWith( pathInfo, mergedIndexPath ) )
             {
-                Path mergedRepoDirPath =
+                StorageAsset mergedRepoDirPath =
                     buildMergedIndexDirectory( activePrincipal, request, repositoryGroup );
-                FilesystemAsset mergedRepoDir = new FilesystemAsset(pathInfo, mergedRepoDirPath);
-                mergedRepositoryContents.add( mergedRepoDir );
+                mergedRepositoryContents.add( mergedRepoDirPath );
             }
             else
             {
@@ -1087,8 +1086,12 @@ public class ArchivaDavResourceFactory
                             }
                         }
                     }
-                    FilesystemAsset parentDir = new FilesystemAsset(pathInfo, tmpDirectory.getParent());
-                    mergedRepositoryContents.add( parentDir );
+                    try {
+                        FilesystemStorage storage = new FilesystemStorage(tmpDirectory.getParent(), new DefaultFileLockManager());
+                        mergedRepositoryContents.add( storage.getAsset("") );
+                    } catch (IOException e) {
+                        throw new DavException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Could not create storage for " + tmpDirectory);
+                    }
                 }
                 for ( ManagedRepository repo : repositories )
                 {
@@ -1298,7 +1301,7 @@ public class ArchivaDavResourceFactory
         }
     }
 
-    protected Path buildMergedIndexDirectory( String activePrincipal,
+    protected StorageAsset buildMergedIndexDirectory( String activePrincipal,
                                               DavServletRequest request,
                                               RepositoryGroup repositoryGroup )
         throws DavException
@@ -1320,7 +1323,7 @@ public class ArchivaDavResourceFactory
             final String id = repositoryGroup.getId();
             TemporaryGroupIndex tmp = temporaryGroupIndexMap.get(id);
 
-            if ( tmp != null && tmp.getDirectory() != null && Files.exists(tmp.getDirectory()))
+            if ( tmp != null && tmp.getDirectory() != null && tmp.getDirectory().exists())
             {
                 if ( System.currentTimeMillis() - tmp.getCreationTime() > (
                     repositoryGroup.getMergedIndexTTL() * 60 * 1000 ) )
@@ -1370,12 +1373,14 @@ public class ArchivaDavResourceFactory
             {
                 Path tempRepoFile = Files.createTempDirectory( "temp" );
                 tempRepoFile.toFile( ).deleteOnExit( );
+                FilesystemStorage storage = new FilesystemStorage(tempRepoFile, new DefaultFileLockManager());
+                StorageAsset tmpAsset = storage.getAsset("");
 
                 IndexMergerRequest indexMergerRequest =
                     new IndexMergerRequest( authzRepos, true, id,
                         indexPath.toString( ),
                         repositoryGroup.getMergedIndexTTL( ) ).mergedIndexDirectory(
-                        tempRepoFile ).temporary( true );
+                        tmpAsset ).temporary( true );
 
                 MergedRemoteIndexesTaskRequest taskRequest =
                     new MergedRemoteIndexesTaskRequest( indexMergerRequest, indexMerger );
@@ -1384,7 +1389,7 @@ public class ArchivaDavResourceFactory
 
                 ArchivaIndexingContext indexingContext = job.execute( ).getIndexingContext( );
 
-                Path mergedRepoDir = Paths.get( indexingContext.getPath( ) );
+                StorageAsset mergedRepoDir = indexingContext.getPath( );
                 TemporaryGroupIndex temporaryGroupIndex =
                     new TemporaryGroupIndex( mergedRepoDir, indexingContext.getId( ), id,
                         repositoryGroup.getMergedIndexTTL( ) ) //
index 586eebc88565cb7df4faefc689ca453e9f0c777a..ca996f526eaa6304ad3e501ef3e44f2af52e4488 100644 (file)
@@ -19,8 +19,7 @@ package org.apache.archiva.webdav;
  * under the License.
  */
 
-import org.apache.archiva.repository.ManagedRepositoryContent;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.webdav.util.IndexWriter;
 import org.apache.archiva.webdav.util.MimeTypes;
 import org.apache.jackrabbit.util.Text;
@@ -48,9 +47,6 @@ import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
 
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
 import java.util.*;
 import java.util.stream.Collectors;
 
index 8e09cd08c3652cdf1e54ba0a3c9ab08255f237c8..800b81b26a7c31b73a8a1281e597044bb887fa9b 100644 (file)
@@ -26,7 +26,7 @@ import org.apache.archiva.configuration.ConfigurationListener;
 import org.apache.archiva.redback.integration.filter.authentication.HttpAuthenticator;
 import org.apache.archiva.repository.ManagedRepository;
 import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.security.ServletAuthenticator;
 import org.apache.jackrabbit.webdav.DavException;
 import org.apache.jackrabbit.webdav.DavLocatorFactory;
@@ -51,9 +51,6 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
index 3aa9b387b40916eb869b1ad12fb1536a9aef358d..2763d4d4c5fafadf6f09a31d47cc6892381d3eab 100644 (file)
@@ -19,10 +19,8 @@ package org.apache.archiva.webdav.util;
  * under the License.
  */
 
-import org.apache.archiva.repository.ManagedRepositoryContent;
-import org.apache.archiva.repository.content.StorageAsset;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.lang.StringUtils;
-import org.apache.jackrabbit.webdav.DavResource;
 import org.apache.jackrabbit.webdav.io.OutputContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
index 02c4125a409876526dd0ffddd9ccbca873d68474..1cfc42fb57c87f3c2f91e9b89bd5fde20b4286c6 100644 (file)
@@ -75,7 +75,7 @@ public class TemporaryGroupIndexSessionCleaner
         for ( TemporaryGroupIndex temporaryGroupIndex : tempFilesPerKey.values() )
         {
             log.info( "cleanup temporaryGroupIndex {} directory {}", temporaryGroupIndex.getIndexId(),
-                      temporaryGroupIndex.getDirectory().toAbsolutePath() );
+                      temporaryGroupIndex.getDirectory().getPath() );
             getIndexMerger( httpSessionEvent ).cleanTemporaryGroupIndex( temporaryGroupIndex );
         }
     }
index 6eb286729998dc13e2e6101a21ef06eb5e861e3d..71505721270823d2b420556c74d2a0f059872714 100644 (file)
@@ -24,7 +24,7 @@ import org.apache.archiva.common.filelock.FileLockManager;
 import org.apache.archiva.common.utils.FileUtils;
 import org.apache.archiva.repository.LayoutException;
 import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.content.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemAsset;
 import org.apache.archiva.repository.events.AuditListener;
 import org.apache.archiva.repository.maven2.MavenManagedRepository;
 import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
@@ -127,7 +127,7 @@ public class DavResourceTest
 
     private DavResource getDavResource( String logicalPath, Path file ) throws LayoutException
     {
-        return new ArchivaDavResource( new FilesystemAsset( logicalPath, file.toAbsolutePath()) , logicalPath, repository, session, resourceLocator,
+        return new ArchivaDavResource( new FilesystemAsset( repository, logicalPath, file.toAbsolutePath()) , logicalPath, repository, session, resourceLocator,
                                        resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null);
     }
 
@@ -349,7 +349,7 @@ public class DavResourceTest
         {
             try
             {
-                return new ArchivaDavResource( new FilesystemAsset( "/" , baseDir.toAbsolutePath()), "/", repository, session, resourceLocator,
+                return new ArchivaDavResource( new FilesystemAsset(repository, "/" , baseDir.toAbsolutePath()), "/", repository, session, resourceLocator,
                                                resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
                                                null );
             }
index 75f6726b7107f0f40c84237054ef01f51772b09f..1343c9452ceeee86ba2e525c20ab14eacf88ad74 100644 (file)
@@ -22,6 +22,7 @@ package org.apache.archiva.webdav;
 import org.apache.archiva.proxy.maven.MavenRepositoryProxyHandler;
 import org.apache.archiva.proxy.model.ProxyFetchResult;
 import org.apache.archiva.repository.ManagedRepositoryContent;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.commons.io.FileUtils;
 
 import java.io.IOException;
@@ -40,10 +41,10 @@ class OverridingRepositoryProxyHandler
     @Override
     public ProxyFetchResult fetchMetadataFromProxies(ManagedRepositoryContent repository, String logicalPath )
     {
-        Path target = Paths.get(repository.getRepoRoot(), logicalPath );
+        StorageAsset target = repository.getRepository().getAsset( logicalPath );
         try
         {
-            FileUtils.copyFile( archivaDavResourceFactoryTest.getProjectBase().resolve( "target/test-classes/maven-metadata.xml" ).toFile(), target.toFile() );
+            FileUtils.copyFile( archivaDavResourceFactoryTest.getProjectBase().resolve( "target/test-classes/maven-metadata.xml" ).toFile(), target.getFilePath().toFile() );
         }
         catch ( IOException e )
         {
index 77bcb9ec010c2fa67a895ff2a729cd70c16d6e5d..50071593ae485af15d4a22ddbbd609cfcbb5d738 100644 (file)
@@ -20,6 +20,7 @@ package org.apache.archiva.metadata.repository.storage;
  */
 
 import org.apache.archiva.metadata.model.ArtifactMetadata;
+import org.apache.archiva.repository.storage.StorageAsset;
 
 import java.nio.file.Path;
 
@@ -31,13 +32,13 @@ public interface RepositoryPathTranslator
 
     String toPath( String namespace, String projectId );
 
-    Path toFile( Path basedir, String namespace, String projectId, String projectVersion, String filename );
+    StorageAsset toFile(StorageAsset basedir, String namespace, String projectId, String projectVersion, String filename );
 
-    Path toFile( Path basedir, String namespace, String projectId );
+    StorageAsset toFile(StorageAsset basedir, String namespace, String projectId );
 
-    Path toFile(Path basedir, String namespace );
+    StorageAsset toFile(StorageAsset basedir, String namespace );
 
-    Path toFile( Path basedir, String namespace, String projectId, String projectVersion );
+    StorageAsset toFile( StorageAsset basedir, String namespace, String projectId, String projectVersion );
 
     ArtifactMetadata getArtifactForPath( String repoId, String relativePath );
 
index cb205101146b3c5f1efc52de3d293f3207dd1803..6fb46d676427341621438169a354f50b713bd5da 100644 (file)
@@ -28,6 +28,7 @@ import org.apache.archiva.policies.ProxyDownloadException;
 import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.xml.XMLException;
 
+import java.io.IOException;
 import java.util.Collection;
 
 // FIXME: we should drop the repositoryId parameters and attach this to an instance of a repository storage
@@ -83,7 +84,7 @@ public interface RepositoryStorage
     String getFilePath( String requestPath, org.apache.archiva.repository.ManagedRepository managedRepository );
 
     String getFilePathWithVersion( final String requestPath, ManagedRepositoryContent managedRepositoryContent )
-        throws RelocationException, XMLException;
+            throws RelocationException, XMLException, IOException;
 
 
 }
index 2cd68f41b3f174a0cf05a860040ba96e97781eea..3ebed1a4999f3fada84d8d89cfd054a0a1638ab6 100644 (file)
@@ -38,6 +38,7 @@ import org.apache.archiva.repository.ManagedRepositoryContent;
 import org.apache.archiva.repository.events.RepositoryListener;
 import org.apache.archiva.xml.XMLException;
 
+import java.io.IOException;
 import java.util.Collection;
 
 /**
@@ -118,7 +119,7 @@ public class MockRepositoryStorage
 
     @Override
     public String getFilePathWithVersion( String requestPath, ManagedRepositoryContent managedRepositoryContent )
-        throws RelocationException, XMLException
+            throws RelocationException, XMLException, IOException
     {
         return null;
     }
index 049f3a71904da1a07f825869be8fc261327ff7c4..7b8c6423039461e7bc3c0d36cda01990c3241102 100644 (file)
@@ -19,6 +19,7 @@ package org.apache.archiva.stagerepository.merge;
  * under the License.
  */
 
+import org.apache.archiva.common.filelock.DefaultFileLockManager;
 import org.apache.archiva.common.utils.VersionComparator;
 import org.apache.archiva.common.utils.VersionUtil;
 import org.apache.archiva.configuration.ArchivaConfiguration;
@@ -34,6 +35,9 @@ import org.apache.archiva.model.ArchivaRepositoryMetadata;
 import org.apache.archiva.repository.RepositoryException;
 import org.apache.archiva.repository.metadata.RepositoryMetadataException;
 import org.apache.archiva.repository.metadata.RepositoryMetadataWriter;
+import org.apache.archiva.repository.storage.FilesystemAsset;
+import org.apache.archiva.repository.storage.FilesystemStorage;
+import org.apache.archiva.repository.storage.StorageAsset;
 import org.apache.archiva.xml.XMLException;
 import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
@@ -42,7 +46,10 @@ import org.springframework.stereotype.Service;
 
 import javax.inject.Inject;
 import javax.inject.Named;
+import java.io.BufferedWriter;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.nio.Buffer;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -227,20 +234,22 @@ public class Maven2RepositoryMerger
         {
 
             // updating version metadata files
-            Path versionMetaDataFileInSourceRepo =
-                pathTranslator.toFile( Paths.get( sourceRepoPath ), artifactMetadata.getNamespace(),
+            FilesystemStorage fsStorage = new FilesystemStorage(Paths.get(sourceRepoPath), new DefaultFileLockManager());
+
+            StorageAsset versionMetaDataFileInSourceRepo =
+                pathTranslator.toFile( new FilesystemAsset(fsStorage, "", Paths.get(sourceRepoPath)), artifactMetadata.getNamespace(),
                                        artifactMetadata.getProject(), artifactMetadata.getVersion(),
                                        METADATA_FILENAME );
 
-            if ( Files.exists(versionMetaDataFileInSourceRepo) )
+            if ( versionMetaDataFileInSourceRepo.exists() )
             {//Pattern quote for windows path
                 String relativePathToVersionMetadataFile =
-                    versionMetaDataFileInSourceRepo.toAbsolutePath().toString().split( Pattern.quote( sourceRepoPath ) )[1];
+                    versionMetaDataFileInSourceRepo.getPath().toString().split( Pattern.quote( sourceRepoPath ) )[1];
                 Path versionMetaDataFileInTargetRepo = Paths.get( targetRepoPath, relativePathToVersionMetadataFile );
 
                 if ( !Files.exists(versionMetaDataFileInTargetRepo) )
                 {
-                    copyFile( versionMetaDataFileInSourceRepo, versionMetaDataFileInTargetRepo );
+                    copyFile( versionMetaDataFileInSourceRepo.getFilePath(), versionMetaDataFileInTargetRepo );
                 }
                 else
                 {
@@ -250,19 +259,19 @@ public class Maven2RepositoryMerger
             }
 
             // updating project meta data file
-            Path projectDirectoryInSourceRepo = versionMetaDataFileInSourceRepo.getParent().getParent();
-            Path projectMetadataFileInSourceRepo = projectDirectoryInSourceRepo.resolve(METADATA_FILENAME );
+            StorageAsset projectDirectoryInSourceRepo = versionMetaDataFileInSourceRepo.getParent().getParent();
+            StorageAsset projectMetadataFileInSourceRepo = projectDirectoryInSourceRepo.resolve(METADATA_FILENAME );
 
-            if ( Files.exists(projectMetadataFileInSourceRepo) )
+            if ( projectMetadataFileInSourceRepo.exists() )
             {
                 String relativePathToProjectMetadataFile =
-                    projectMetadataFileInSourceRepo.toAbsolutePath().toString().split( Pattern.quote( sourceRepoPath ) )[1];
+                    projectMetadataFileInSourceRepo.getPath().split( Pattern.quote( sourceRepoPath ) )[1];
                 Path projectMetadataFileInTargetRepo = Paths.get( targetRepoPath, relativePathToProjectMetadataFile );
 
                 if ( !Files.exists(projectMetadataFileInTargetRepo) )
                 {
 
-                    copyFile( projectMetadataFileInSourceRepo, projectMetadataFileInTargetRepo );
+                    copyFile( projectMetadataFileInSourceRepo.getFilePath(), projectMetadataFileInTargetRepo );
                 }
                 else
                 {
@@ -331,7 +340,11 @@ public class Maven2RepositoryMerger
             projectMetadata.setReleasedVersion( latestVersion );
         }
 
-        RepositoryMetadataWriter.write( projectMetadata, projectMetaDataFileIntargetRepo );
+        try(BufferedWriter writer = Files.newBufferedWriter(projectMetaDataFileIntargetRepo)) {
+            RepositoryMetadataWriter.write( projectMetadata, writer );
+        } catch (IOException e) {
+            throw new RepositoryMetadataException(e);
+        }
 
     }
 
@@ -348,7 +361,11 @@ public class Maven2RepositoryMerger
         }
 
         versionMetadata.setLastUpdatedTimestamp( lastUpdatedTimestamp );
-        RepositoryMetadataWriter.write( versionMetadata, versionMetaDataFileInTargetRepo );
+        try(BufferedWriter writer = Files.newBufferedWriter(versionMetaDataFileInTargetRepo) ) {
+            RepositoryMetadataWriter.write( versionMetadata, writer);
+        } catch (IOException e) {
+            throw new RepositoryMetadataException(e);
+        }
     }
 
     private ArchivaRepositoryMetadata getMetadata( Path metadataFile )
@@ -361,7 +378,7 @@ public class Maven2RepositoryMerger
             {
                 metadata = MavenMetadataReader.read( metadataFile );
             }
-            catch ( XMLException e )
+            catch (XMLException | IOException e )
             {
                 throw new RepositoryMetadataException( e.getMessage(), e );
             }
diff --git a/pom.xml b/pom.xml
index 8a1ae2a871b3cedcca7942f0a359f10a7e6e971a..f4dd5589f24f42b825604b608de27433565c5978 100644 (file)
--- a/pom.xml
+++ b/pom.xml
         <artifactId>archiva-metadata-consumer</artifactId>
         <version>${project.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.archiva</groupId>
+        <artifactId>archiva-storage-api</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.archiva</groupId>
+        <artifactId>archiva-storage-fs</artifactId>
+        <version>${project.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.archiva</groupId>
         <artifactId>test-repository</artifactId>